diff --git a/.github/workflows/jira_sync.yml b/.github/workflows/jira_sync.yml
new file mode 100644
index 0000000..1981b76
--- /dev/null
+++ b/.github/workflows/jira_sync.yml
@@ -0,0 +1,11 @@
+name: Sync GitHub issues to Jira
+on: [issues, issue_comment]
+
+jobs:
+  sync-issues:
+    name: Sync issues to Jira
+    runs-on: ubuntu-latest
+    steps:
+      - uses: canonical/sync-issues-github-jira@v1
+        with:
+          webhook-url: ${{ secrets.JIRA_WEBHOOK_URL }}
\ No newline at end of file
diff --git a/examples/00_reference/04_pulse_library.ipynb b/examples/00_reference/04_pulse_library.ipynb
index ea99ccc..3e2110a 100644
--- a/examples/00_reference/04_pulse_library.ipynb
+++ b/examples/00_reference/04_pulse_library.ipynb
@@ -11,7 +11,9 @@
     "\n",
     "- How to use pre-defined pulses out of the box, and how to sweep their parameters [here](#sweep-parameters-of-an-out-of-the-box-pulse)\n",
     "- How to define your own, parameterized pulses and sweep their parameters [here](#define-a-new-pulse-type-and-sweep-it)\n",
-    "- How to define sampled pulses, e.g., from a `numpy` array [here](#create-a-sampled-pulse-from-an-array-of-sampling-points)"
+    "- How to define sampled pulses, e.g., from a `numpy` array [here](#create-a-sampled-pulse-from-an-array-of-sampling-points)\n",
+    "\n",
+    "A demonstration of this notebook is also available on our YouTube channel [here](https://www.youtube.com/watch?v=20sqtgs281Y&list=PLjxUCNDRYw8k1_HTzXDohUHKhYKYFQrbn&index=3&ab_channel=ZurichInstruments)"
    ]
   },
   {
diff --git a/examples/00_reference/09_output_simulator.ipynb b/examples/00_reference/09_output_simulator.ipynb
index 9cc66d6..fbcf8c9 100644
--- a/examples/00_reference/09_output_simulator.ipynb
+++ b/examples/00_reference/09_output_simulator.ipynb
@@ -10,7 +10,9 @@
     "\n",
     "LabOne Q can simulate the output of each channel in a sample-precise way. This feature can be used to check experiments even before they are executed on hardware. Combine it with the pulse sheet viewer to get a multi-scale overview of pulses and sequences.\n",
     "\n",
-    "This notebook will use an amplitude Rabi experiment to demonstrate some use cases of the output simulator."
+    "This notebook will use an amplitude Rabi experiment to demonstrate some use cases of the output simulator.\n",
+    "\n",
+    "For more examples, also have a look at the demo video on our YouTube channel [here](https://www.youtube.com/watch?v=hov1pY-XyOY&list=PLjxUCNDRYw8k1_HTzXDohUHKhYKYFQrbn&index=2&ab_channel=ZurichInstruments)"
    ]
   },
   {
diff --git a/examples/01_qubit_characterization/01_cw_resonator_spec_shfsg_shfqa_shfqc.ipynb b/examples/01_qubit_characterization/01_cw_resonator_spec_shfsg_shfqa_shfqc.ipynb
index acc29b5..1303917 100644
--- a/examples/01_qubit_characterization/01_cw_resonator_spec_shfsg_shfqa_shfqc.ipynb
+++ b/examples/01_qubit_characterization/01_cw_resonator_spec_shfsg_shfqa_shfqc.ipynb
@@ -7,7 +7,9 @@
    "source": [
     "# Resonator Spectroscopy with SHFQA or SHFQC\n",
     "\n",
-    "This notebook shows you how to perform CW resonator spectroscopy in LabOne Q with a SHFQA or the quantum analyzer channels of a SHFQC. Here, you'll find the resonance frequency of the qubit readout resonator by looking at the transmission or reflection of a probe signal applied through the readout line."
+    "This notebook shows you how to perform CW resonator spectroscopy in LabOne Q with a SHFQA or the quantum analyzer channels of a SHFQC. Here, you'll find the resonance frequency of the qubit readout resonator by looking at the transmission or reflection of a probe signal applied through the readout line.\n",
+    "\n",
+    "A demonstration of this notebook, starting from the basics of installing LabOne Q, is also available on our YouTube channel [here](https://www.youtube.com/watch?v=aRaGHNZeVkI&list=PLjxUCNDRYw8k1_HTzXDohUHKhYKYFQrbn&index=1&ab_channel=ZurichInstruments)"
    ]
   },
   {
@@ -282,9 +284,9 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "L1Q_local",
+   "display_name": "develop",
    "language": "python",
-   "name": "python3"
+   "name": "develop"
   },
   "language_info": {
    "codemirror_mode": {
@@ -296,7 +298,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.11"
+   "version": "3.11.0"
   },
   "orig_nbformat": 4
  },
diff --git a/laboneq/VERSION.txt b/laboneq/VERSION.txt
index 9aa3464..6533b66 100644
--- a/laboneq/VERSION.txt
+++ b/laboneq/VERSION.txt
@@ -1 +1 @@
-2.7.0
\ No newline at end of file
+2.8.0
\ No newline at end of file
diff --git a/laboneq/application_management/__init__.py b/laboneq/application_management/__init__.py
new file mode 100644
index 0000000..17c557a
--- /dev/null
+++ b/laboneq/application_management/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2023 Zurich Instruments AG
+# SPDX-License-Identifier: Apache-2.0
diff --git a/laboneq/application_management/application_manager.py b/laboneq/application_management/application_manager.py
new file mode 100644
index 0000000..daaf2df
--- /dev/null
+++ b/laboneq/application_management/application_manager.py
@@ -0,0 +1,68 @@
+# Copyright 2023 Zurich Instruments AG
+# SPDX-License-Identifier: Apache-2.0
+
+import logging
+from dataclasses import dataclass
+
+from lagom import Container, Singleton
+
+from laboneq.implementation.compilation_service.compilation_service_legacy import (
+    CompilationServiceLegacy,
+)
+from laboneq.implementation.experiment_workflow import ExperimentWorkflow
+from laboneq.implementation.payload_builder.payload_builder import PayloadBuilder
+from laboneq.implementation.runner.runner_legacy import RunnerLegacy
+from laboneq.interfaces.application_management.laboneq_settings import LabOneQSettings
+from laboneq.interfaces.compilation_service.compilation_service_api import (
+    CompilationServiceAPI,
+)
+from laboneq.interfaces.experiment.experiment_api import ExperimentAPI
+from laboneq.interfaces.payload_builder.payload_builder_api import PayloadBuilderAPI
+from laboneq.interfaces.runner.runner_api import RunnerAPI
+from laboneq.interfaces.runner.runner_control_api import RunnerControlAPI
+
+_logger = logging.getLogger(__name__)
+
+
+@dataclass(init=False)
+class LaboneQDefaultSettings(LabOneQSettings):
+    def __init__(self):
+        self.runner_is_local = True
+        self.compilation_service_is_local = True
+
+    # TODO: use a configuration file or environment variables to set these values
+    # Maybe use dotenv (see https://pypi.org/project/python-dotenv/)
+    # and/or dynaconf (see https://www.dynaconf.com/)
+    runner_is_local: bool
+    compilation_service_is_local: bool
+
+
+class ApplicationManager:
+    _instance = None
+
+    def __init__(self):
+        self._experimenter_api = None
+
+    def start(self):
+        if self._experimenter_api is not None:
+            _logger.warning("ApplicationManager already started.")
+            return
+        container = Container(log_undefined_deps=True)
+        container[LabOneQSettings] = LaboneQDefaultSettings
+        container[RunnerControlAPI] = Singleton(RunnerLegacy)
+        # RunnerControlAPI and the RunnerAPI are currently implemented by
the same object: + container[RunnerAPI] = container[RunnerControlAPI] + container[CompilationServiceAPI] = CompilationServiceLegacy + container[PayloadBuilderAPI] = PayloadBuilder + container[ExperimentAPI] = ExperimentWorkflow + self._experimenter_api = container[ExperimentAPI] + + def laboneq(self) -> ExperimentAPI: + return self._experimenter_api + + @staticmethod + def instance() -> "ApplicationManager": + if ApplicationManager._instance is None: + ApplicationManager._instance = ApplicationManager() + ApplicationManager._instance.start() + return ApplicationManager._instance diff --git a/laboneq/compiler/code_generator/analyze_events.py b/laboneq/compiler/code_generator/analyze_events.py index 6805878..b5904db 100644 --- a/laboneq/compiler/code_generator/analyze_events.py +++ b/laboneq/compiler/code_generator/analyze_events.py @@ -293,9 +293,9 @@ def analyze_set_oscillator_times( signal_obj: SignalObj, ) -> AWGSampledEventSequence: signal_id = signal_obj.id - device_id = signal_obj.device_id - device_type = signal_obj.device_type - sampling_rate = signal_obj.sampling_rate + device_id = signal_obj.awg.device_id + device_type = signal_obj.awg.device_type + sampling_rate = signal_obj.awg.sampling_rate delay = signal_obj.total_delay set_oscillator_events = [ event @@ -356,9 +356,9 @@ def analyze_acquire_times( ) -> AWGSampledEventSequence: signal_id = signal_obj.id - sampling_rate = signal_obj.sampling_rate + sampling_rate = signal_obj.awg.sampling_rate delay = signal_obj.total_delay - sample_multiple = signal_obj.device_type.sample_multiple + sample_multiple = signal_obj.awg.device_type.sample_multiple channels = signal_obj.channels _logger.debug( @@ -465,7 +465,7 @@ def analyze_trigger_events( and signal.id == event["signal"] ] delay = signal.total_delay - sampling_rate = signal.sampling_rate + sampling_rate = signal.awg.sampling_rate device_type = signal.awg.device_type sampled_digital_signal_change_events = AWGSampledEventSequence() diff --git a/laboneq/compiler/code_generator/analyze_playback.py b/laboneq/compiler/code_generator/analyze_playback.py index 60fe793..15a921d 100644 --- a/laboneq/compiler/code_generator/analyze_playback.py +++ b/laboneq/compiler/code_generator/analyze_playback.py @@ -458,7 +458,7 @@ def _make_pulse_signature(pulse_iv: Interval, wave_iv: Interval, signal_ids: Lis def _interval_start_after_oscillator_reset( events, signals, compacted_intervals: IntervalTree, delay, sampling_rate ): - device_id = next(iter(signals.values())).device_id + device_id = next(iter(signals.values())).awg.device_id osc_reset_event_time = [ length_to_samples(event["time"] + delay, sampling_rate) diff --git a/laboneq/compiler/code_generator/code_generator.py b/laboneq/compiler/code_generator/code_generator.py index 217717d..0f06893 100644 --- a/laboneq/compiler/code_generator/code_generator.py +++ b/laboneq/compiler/code_generator/code_generator.py @@ -7,11 +7,10 @@ import copy import logging import math -import os -import re +from collections import namedtuple from contextlib import suppress from itertools import groupby -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, NamedTuple, Tuple import numpy as np from engineering_notation import EngNumber @@ -72,9 +71,7 @@ from laboneq.compiler.common.pulse_parameters import decode_pulse_parameters from laboneq.compiler.common.signal_obj import SignalObj from laboneq.compiler.common.trigger_mode import TriggerMode -from laboneq.compiler.experiment_access import ExperimentDAO from 
laboneq.compiler.experiment_access.pulse_def import PulseDef -from laboneq.compiler.experiment_access.section_info import SectionInfo from laboneq.core.exceptions import LabOneQException from laboneq.core.types.compiled_experiment import ( PulseInstance, @@ -198,7 +195,7 @@ def calculate_integration_weights( length = pulse_def.length if length is None: - length = len(samples) / signal_obj.sampling_rate + length = len(samples) / signal_obj.awg.sampling_rate _logger.debug( "Sampling integration weights for %s with modulation_frequency %s", @@ -214,7 +211,7 @@ def calculate_integration_weights( pulse_parameters = decode_pulse_parameters(pulse_parameters) integration_weight = sample_pulse( signal_type="iq", - sampling_rate=signal_obj.sampling_rate, + sampling_rate=signal_obj.awg.sampling_rate, length=length, amplitude=amplitude, pulse_function=pulse_def.function, @@ -232,7 +229,7 @@ def calculate_integration_weights( ) integration_weight["basename"] = ( - signal_obj.device_id + signal_obj.awg.device_id + "_" + str(signal_obj.awg.awg_number) + "_" @@ -302,7 +299,6 @@ def _add_signal_to_awg(self, signal_obj: SignalObj): awg_key = signal_obj.awg.key if awg_key not in self._awgs: self._awgs[awg_key] = copy.deepcopy(signal_obj.awg) - self._awgs[awg_key].device_type = signal_obj.device_type self._awgs[awg_key].signals.append(signal_obj) def add_signal(self, signal: SignalObj): @@ -343,7 +339,7 @@ def gen_waves(self): signal_weights = self._integration_weights[signal_obj.id] for weight in signal_weights.values(): basename = weight["basename"] - if signal_obj.device_type.supports_complex_waves: + if signal_obj.awg.device_type.supports_complex_waves: self._save_wave_bin( CodeGenerator.SHFQA_COMPLEX_SAMPLE_SCALING * (weight["samples_i"] - 1j * weight["samples_q"]), @@ -388,7 +384,7 @@ def gen_waves(self): if not sampled_signature: continue sig_string = signature_key.signature_string() - if signal_obj.device_type.supports_binary_waves: + if signal_obj.awg.device_type.supports_binary_waves: if awg.signal_type == AWGSignalType.SINGLE: self._save_wave_bin( sampled_signature["samples_i"], @@ -425,7 +421,7 @@ def gen_waves(self): "_marker2", ) - elif signal_obj.device_type.supports_complex_waves: + elif signal_obj.awg.device_type.supports_complex_waves: self._save_wave_bin( CodeGenerator.SHFQA_COMPLEX_SAMPLE_SCALING * ( @@ -438,7 +434,7 @@ def gen_waves(self): ) else: raise RuntimeError( - f"Device type {signal_obj.device_type} has invalid supported waves config." + f"Device type {signal_obj.awg.device_type} has invalid supported waves config." ) else: signal_obj = awg.signals[0] @@ -446,7 +442,7 @@ def gen_waves(self): virtual_signal_id ].items(): sig_string = signature_key.signature_string() - if signal_obj.device_type.supports_binary_waves: + if signal_obj.awg.device_type.supports_binary_waves: self._save_wave_bin( sampled_signature["samples_i"], sampled_signature["signature_pulse_map"], @@ -477,7 +473,7 @@ def gen_waves(self): else: raise RuntimeError( - f"Device type {signal_obj.device_type} has invalid supported waves config." + f"Device type {signal_obj.awg.device_type} has invalid supported waves config." 
) # check that there are no duplicate filenames in the wave pool (QCSW-1079) @@ -494,74 +490,25 @@ def gen_waves(self): _logger.debug(self._waves) - def gen_acquire_map(self, events: EventList, sections: ExperimentDAO): - # todo (PW): this can EASILY be factored out into a separate file - loop_events = [ - e - for e in events - if e["event_type"] == "LOOP_ITERATION_END" and not e.get("shadow") - ] - averaging_loop_info: SectionInfo = None - innermost_loop: Dict[str, Any] = None - outermost_loop: Dict[str, Any] = None - for e in loop_events: - section_info = sections.section_info(e["section_name"]) - if section_info.averaging_type == "hardware": - averaging_loop_info = section_info - if ( - innermost_loop is None - or e["nesting_level"] > innermost_loop["nesting_level"] - ): - innermost_loop = e - if ( - outermost_loop is None - or e["nesting_level"] < outermost_loop["nesting_level"] - ): - outermost_loop = e - averaging_loop = ( - None - if averaging_loop_info is None - else innermost_loop - if averaging_loop_info.averaging_mode == "sequential" - else outermost_loop - ) - if ( - averaging_loop is not None - and averaging_loop["section_name"] != averaging_loop_info.section_id - ): - raise RuntimeError( - f"Internal error: couldn't unambiguously determine the hardware averaging loop - " - f"innermost '{innermost_loop['section_name']}', outermost '{outermost_loop['section_name']}', " - f"hw avg '{averaging_loop_info.section_id}' with mode '{averaging_loop_info.averaging_mode}' " - f"expected to match '{averaging_loop['section_name']}'" - ) - unrolled_avg_matcher = re.compile( - "(?!)" # Never match anything - if averaging_loop is None - else f"{averaging_loop['section_name']}_[0-9]+" - ) + def gen_acquire_map(self, events: EventList): # timestamp -> map[signal -> handle] self._simultaneous_acquires: Dict[float, Dict[str, str]] = {} for e in (e for e in events if e["event_type"] == "ACQUIRE_START"): - if e.get("shadow") and unrolled_avg_matcher.match(e.get("loop_iteration")): - continue # Skip events for unrolled averaging loop time_events = self._simultaneous_acquires.setdefault(e["time"], {}) time_events[e["signal"]] = e["acquire_handle"] def gen_seq_c(self, events: List[Any], pulse_defs: Dict[str, PulseDef]): - signal_keys = [ - "sampling_rate", - "id", - "device_id", - "device_type", - "delay_signal", - ] signal_info_map = { - id: {k: getattr(s, k) for k in signal_keys} - for id, s in self._signals.items() + signal_id: {"id": s.id, "delay_signal": s.delay_signal} + for signal_id, s in self._signals.items() } + for k, s in signal_info_map.items(): - s["awg_number"] = self._signals[k].awg.awg_number + signal_obj = self._signals[k] + s["sampling_rate"] = signal_obj.awg.sampling_rate + s["awg_number"] = signal_obj.awg.awg_number + s["device_id"] = signal_obj.awg.device_id + s["device_type"] = signal_obj.awg.device_type ( self._integration_times, @@ -646,12 +593,12 @@ def _calc_global_awg_params(awg: AWGInfo) -> Tuple[float, float]: relevant_delay = signal_obj.total_delay if ( - round(relevant_delay * signal_obj.sampling_rate) - % signal_obj.device_type.sample_multiple + round(relevant_delay * signal_obj.awg.sampling_rate) + % signal_obj.awg.device_type.sample_multiple != 0 ): raise RuntimeError( - f"Delay {relevant_delay} s = {round(relevant_delay*signal_obj.sampling_rate)} samples on signal {signal_obj.id} is not compatible with the sample multiple of {signal_obj.device_type.sample_multiple} on {signal_obj.device_type}" + f"Delay {relevant_delay} s = 
{round(relevant_delay*signal_obj.awg.sampling_rate)} samples on signal {signal_obj.id} is not compatible with the sample multiple of {signal_obj.awg.device_type.sample_multiple} on {signal_obj.awg.device_type}"
             )
         all_relevant_delays[signal_obj.id] = relevant_delay
 
@@ -673,7 +620,7 @@ def _calc_global_awg_params(awg: AWGInfo) -> Tuple[float, float]:
             else:
                 global_delay = relevant_delay
-            global_sampling_rate = signal_obj.sampling_rate
+            global_sampling_rate = signal_obj.awg.sampling_rate
             signals_so_far.add(signal_obj.id)
 
         if global_delay is None:
@@ -737,6 +684,19 @@ def _emit_new_awg_events(self, old_event, new_events):
                 time += new_length
         return new_awg_events, pulse_name_mapping
 
+    def _pulses_compatible_for_compression(self, pulses: List[NamedTuple]):
+        sorted_pulses = sorted(pulses, key=lambda x: x.start)
+        n = len(sorted_pulses)
+
+        for i in range(n - 1):
+            pi = sorted_pulses[i]
+            pj = sorted_pulses[i + 1]
+
+            if pi.end > pj.start and pi.can_compress != pj.can_compress:
+                return False
+
+        return True
+
     def _compress_waves(
         self, sampled_events, sampled_signatures, signal_id, min_play_wave, pulse_defs
     ):
@@ -758,7 +718,7 @@ def _compress_waves(
             if len(pulses_not_in_pulsedef) > 0:
                 continue
-            if any(
+            if all(
                 not pulse_defs[pulse.pulse].can_compress
                 for pulse in wave_form.pulses
             ):
@@ -774,8 +734,30 @@
                 )
                 if k in sampled_signature
             }
+            PulseComprInfo = namedtuple(
+                "PulseComprInfo", ["start", "end", "can_compress"]
+            )
+            pulse_compr_infos = [
+                PulseComprInfo(
+                    start=pulse.start,
+                    end=pulse.start + pulse.length,
+                    can_compress=pulse_defs[pulse.pulse].can_compress,
+                )
+                for pulse in wave_form.pulses
+            ]
+            if not self._pulses_compatible_for_compression(pulse_compr_infos):
+                raise LabOneQException(
+                    "Overlapping pulses must either all have can_compress=True or all have can_compress=False"
+                )
+            compressible_segments = [
+                (pulse.start, pulse.start + pulse.length)
+                for pulse in wave_form.pulses
+                if pulse_defs[pulse.pulse].can_compress
+            ]
+            # remove duplicates, keep order
+            compressible_segments = [*dict.fromkeys(compressible_segments)]
             new_events = self._wave_compressor.compress_wave(
-                sample_dict, min_play_wave
+                sample_dict, min_play_wave, compressible_segments
             )
             pulse_names = [pulse.pulse for pulse in wave_form.pulses]
             if new_events is None:
@@ -998,8 +980,8 @@ def _gen_seq_c_per_awg(
             _logger.debug("Multi signal %s", awg)
             assert len(awg.signals) > 0
             delay = 0.0
-            device_type = awg.signals[0].device_type
-            sampling_rate = awg.signals[0].sampling_rate
+            device_type = awg.signals[0].awg.device_type
+            sampling_rate = awg.signals[0].awg.sampling_rate
             mixer_type = awg.signals[0].mixer_type
             for signal_obj in awg.signals:
                 if signal_obj.signal_type != "integration":
@@ -1038,7 +1020,7 @@
                 sampled_events.merge(interval_events)
 
             min_play_waves = [
-                signal.device_type.min_play_wave for signal in awg.signals
+                signal.awg.device_type.min_play_wave for signal in awg.signals
             ]
             assert all(
                 min_play_wave == min_play_waves[0] for min_play_wave in min_play_waves
@@ -1070,20 +1052,20 @@
         elif awg.signal_type != AWGSignalType.DOUBLE:
             for signal_obj in awg.signals:
-                if signal_obj.device_type == DeviceType.SHFQA:
+                if signal_obj.awg.device_type == DeviceType.SHFQA:
                     sub_channel = signal_obj.channels[0]
                 else:
                     sub_channel = None
                 interval_events = analyze_play_wave_times(
                     events=events,
                     signals={signal_obj.id: signal_obj},
-                    device_type=signal_obj.device_type,
-                    sampling_rate=signal_obj.sampling_rate,
+                    device_type=signal_obj.awg.device_type,
+
sampling_rate=signal_obj.awg.sampling_rate, delay=signal_obj.total_delay, other_events=sampled_events, phase_resolution_range=self.phase_resolution_range(), waveform_size_hints=self.waveform_size_hints( - signal_obj.device_type + signal_obj.awg.device_type ), sub_channel=sub_channel, use_command_table=use_command_table, @@ -1093,9 +1075,9 @@ def _gen_seq_c_per_awg( signal_obj.id, interval_events, pulse_defs=pulse_defs, - sampling_rate=signal_obj.sampling_rate, + sampling_rate=signal_obj.awg.sampling_rate, signal_type=signal_obj.signal_type, - device_type=signal_obj.device_type, + device_type=signal_obj.awg.device_type, mixer_type=signal_obj.mixer_type, ) @@ -1106,7 +1088,7 @@ def _gen_seq_c_per_awg( sampled_events=sampled_events, sampled_signatures=sampled_signatures, signal_id=signal_obj.id, - min_play_wave=signal_obj.device_type.min_play_wave, + min_play_wave=signal_obj.awg.device_type.min_play_wave, pulse_defs=pulse_defs, ) @@ -1129,7 +1111,7 @@ def _gen_seq_c_per_awg( for siginfo in sorted(signature_infos): declarations_generator.add_wave_declaration( - signal_obj.device_type, + signal_obj.awg.device_type, signal_obj.signal_type, siginfo[0], siginfo[1], @@ -1143,12 +1125,12 @@ def _gen_seq_c_per_awg( interval_events = analyze_play_wave_times( events=events, signals={s.id: s for s in awg.signals}, - device_type=signal_a.device_type, - sampling_rate=signal_a.sampling_rate, + device_type=signal_a.awg.device_type, + sampling_rate=signal_a.awg.sampling_rate, delay=signal_a.total_delay, other_events=sampled_events, phase_resolution_range=self.phase_resolution_range(), - waveform_size_hints=self.waveform_size_hints(signal_a.device_type), + waveform_size_hints=self.waveform_size_hints(signal_a.awg.device_type), use_command_table=False, # do not set amplitude/oscillator phase via CT ) @@ -1156,9 +1138,9 @@ def _gen_seq_c_per_awg( virtual_signal_id, interval_events, pulse_defs=pulse_defs, - sampling_rate=signal_a.sampling_rate, + sampling_rate=signal_a.awg.sampling_rate, signal_type=signal_a.signal_type, - device_type=signal_a.device_type, + device_type=signal_a.awg.device_type, mixer_type=signal_a.mixer_type, ) @@ -1166,14 +1148,15 @@ def _gen_seq_c_per_awg( sampled_events.merge(interval_events) assert ( - signal_a.device_type.min_play_wave == signal_b.device_type.min_play_wave + signal_a.awg.device_type.min_play_wave + == signal_b.awg.device_type.min_play_wave ) self._compress_waves( sampled_events=sampled_events, sampled_signatures=sampled_signatures, signal_id=virtual_signal_id, - min_play_wave=signal_a.device_type.min_play_wave, + min_play_wave=signal_a.awg.device_type.min_play_wave, pulse_defs=pulse_defs, ) @@ -1184,7 +1167,7 @@ def _gen_seq_c_per_awg( sig_string = sig.signature_string() length = sig.length declarations_generator.add_wave_declaration( - signal_a.device_type, + awg.device_type, awg.signal_type.value, sig_string, length, @@ -1268,7 +1251,7 @@ def _gen_seq_c_per_awg( self._src.append({"filename": filename, "text": seq_c_text}) self._wave_indices_all.append( { - "filename": os.path.splitext(filename)[0] + "_waveindices.csv", + "filename": filename, "value": handler.wave_indices.wave_indices(), } ) @@ -1335,7 +1318,6 @@ def _sample_pulses( sampled_signatures[signature.waveform] = None _logger.debug("Signatures: %s", signatures) - max_amplitude = 0.0 needs_conjugate = device_type == DeviceType.SHFSG for signature in signatures: length = signature.waveform.length @@ -1395,9 +1377,6 @@ def _sample_pulses( if pulse_part.amplitude is not None: amplitude *= pulse_part.amplitude - if 
abs(amplitude) > max_amplitude: - max_amplitude = abs(amplitude) - oscillator_phase = pulse_part.oscillator_phase baseband_phase = pulse_part.baseband_phase @@ -1590,8 +1569,7 @@ def _sample_pulses( if has_marker2: sampled_pulse_obj["samples_marker2"] = samples_marker2 - if max_amplitude > 1e-9: - sampled_signatures[signature.waveform] = sampled_pulse_obj + sampled_signatures[signature.waveform] = sampled_pulse_obj verify_amplitude_no_clipping( sampled_pulse_obj, diff --git a/laboneq/compiler/code_generator/seq_c_generator.py b/laboneq/compiler/code_generator/seq_c_generator.py index 8ae7ea3..0259f57 100644 --- a/laboneq/compiler/code_generator/seq_c_generator.py +++ b/laboneq/compiler/code_generator/seq_c_generator.py @@ -53,7 +53,6 @@ def string_sanitize(input): class SeqCGenerator: def __init__(self): - self._seq_c_text = "" self._statements: List[SeqCStatement] = [] def num_statements(self): @@ -319,82 +318,82 @@ def add_play_hold_statement( ) def generate_seq_c(self): - self._seq_c_text = "" + seq_c_statements = [] for statement in self._statements: _logger.debug("processing statement %s", statement) - self.emit_statement(statement) - return self._seq_c_text + seq_c_statements.append(self.emit_statement(statement)) + return "".join(seq_c_statements) def emit_statement(self, statement: SeqCStatement): if statement["type"] == "generic_statement": if "assign_to" in statement: - self._seq_c_text += f"{statement['assign_to']} = " - self._seq_c_text += statement["function"] + "(" + assign_to = f"{statement['assign_to']} = " + else: + assign_to = "" if "args" in statement: - is_first = True - for arg in statement["args"]: - if not is_first: - self._seq_c_text += "," - else: - is_first = False - self._seq_c_text += str(arg) - self._seq_c_text += ");\n" + args = ",".join(str(s) for s in statement["args"]) + else: + args = "" + return f"{assign_to}{statement['function']}({args});\n" elif statement["type"] == "wave_declaration": if statement["device_type"].supports_binary_waves: - self._seq_c_text += self._gen_wave_declaration_placeholder(statement) + return self._gen_wave_declaration_placeholder(statement) + else: + return "" + elif statement["type"] == "function_def": - self._seq_c_text += statement["text"] + return statement["text"] + elif statement["type"] == "variable_declaration": - self._seq_c_text += "var " + statement["variable_name"] if "initial_value" in statement: - self._seq_c_text += " = " + str(statement["initial_value"]) + ";\n" + initial_value = f" = {statement['initial_value']}" else: - self._seq_c_text += ";\n" + initial_value = "" + return f"var {statement['variable_name']}{initial_value};\n" + elif statement["type"] == "variable_assignment": - self._seq_c_text += statement["variable_name"] - self._seq_c_text += " = " + str(statement["value"]) + ";\n" + return f"{statement['variable_name']} = {statement['value']};\n" + elif statement["type"] == "variable_increment": - self._seq_c_text += statement["variable_name"] - self._seq_c_text += " += " + str(statement["value"]) + ";\n" + return f"{statement['variable_name']} += {statement['value']};\n" elif statement["type"] == "do_while": - self._seq_c_text += "do {\n" - self._seq_c_text += textwrap.indent( - statement["body"].generate_seq_c(), " " - ) - self._seq_c_text += "}\nwhile(" + statement["condition"] + ");\n" + body = textwrap.indent(statement["body"].generate_seq_c(), " ") + return f"do {{\n{body}}}\nwhile({statement['condition']});\n" elif statement["type"] == "repeat": - self._seq_c_text += f"repeat 
({statement['num_repeats']}) {{\n" - self._seq_c_text += textwrap.indent( - statement["body"].generate_seq_c(), " " - ) - self._seq_c_text += "}\n" + body = textwrap.indent(statement["body"].generate_seq_c(), " ") + return f"repeat ({statement['num_repeats']}) {{\n{body}}}\n" elif statement["type"] == "assignWaveIndex": wave_channels = self._build_wave_channel_assignment(statement) - self._seq_c_text += ( - f'assignWaveIndex({wave_channels},{statement["wave_index"]});\n' - ) + return f'assignWaveIndex({wave_channels},{statement["wave_index"]});\n' + elif statement["type"] == "playWave": wave_channels = self._build_wave_channel_assignment(statement) - self._seq_c_text += f"playWave({wave_channels});\n" + return f"playWave({wave_channels});\n" + elif statement["type"] == "executeTableEntry": - self._seq_c_text += f"executeTableEntry({statement['table_index']}" latency = statement.get("latency", None) if latency is not None: - self._seq_c_text += f", {latency}" - self._seq_c_text += ");" + latency = f", {latency}" + else: + latency = "" if statement["comment"] != "": - self._seq_c_text += f" // {statement['comment']}" - self._seq_c_text += "\n" + comment = f" // {statement['comment']}" + else: + comment = "" + return f"executeTableEntry({statement['table_index']}{latency});{comment}\n" + elif statement["type"] == "comment": - self._seq_c_text += "/* " + statement["text"] + " */\n" + return "/* " + statement["text"] + " */\n" + elif statement["type"] == "playZero": - self._seq_c_text += f"playZero({statement['num_samples']});\n" + return f"playZero({statement['num_samples']});\n" + elif statement["type"] == "playHold": - self._seq_c_text += f"playHold({statement['num_samples']});\n" + return f"playHold({statement['num_samples']});\n" def _gen_wave_declaration_placeholder(self, statement: SeqCStatement) -> str: dual_channel = statement["signal_type"] in ["iq", "double", "multi"] @@ -410,10 +409,9 @@ def _gen_wave_declaration_placeholder(self, statement: SeqCStatement) -> str: makers_declaration2 = ",true" if dual_channel: - return ( f"wave w{sig_string}_i = placeholder({length}{makers_declaration1});\n" - + f"wave w{sig_string}_q = placeholder({length}{makers_declaration2});\n" + f"wave w{sig_string}_q = placeholder({length}{makers_declaration2});\n" ) else: return f"wave w{sig_string} = placeholder({length}{makers_declaration1});\n" diff --git a/laboneq/compiler/code_generator/signatures.py b/laboneq/compiler/code_generator/signatures.py index fdbceae..29aa288 100644 --- a/laboneq/compiler/code_generator/signatures.py +++ b/laboneq/compiler/code_generator/signatures.py @@ -5,7 +5,6 @@ import hashlib import math -from copy import deepcopy from dataclasses import dataclass, field from typing import Any, Dict, FrozenSet, Optional, Tuple @@ -207,8 +206,10 @@ def reduce_signature_phase( use_ct_phase: bool, prev_hw_oscillator_phase: Optional[float], ) -> PlaybackSignature: - signature = deepcopy(signature) + """Reduces the phase of the signature. + Modifies the passed in `signature` object in-place. + """ if use_ct_phase: this_hw_oscillator_phase = signature.waveform.pulses[-1].baseband_phase or 0.0 if prev_hw_oscillator_phase is not None: @@ -239,11 +240,13 @@ def reduce_signature_phase( def reduce_signature_amplitude(signature: PlaybackSignature) -> PlaybackSignature: - # Absorb the pulse amplitude into the command table. Whenever possible, the - # waveforms will be sampled at unit amplitude, making waveform reuse more likely. + """Reduces the amplitude of the signature. 
-    signature = deepcopy(signature)
+    Modifies the passed in `signature` object in-place.
+
+    Absorb the pulse amplitude into the command table. Whenever possible, the
+    waveforms will be sampled at unit amplitude, making waveform reuse more likely.
+    """
     if len(signature.waveform.pulses) == 0:
         return signature
     signature.set_amplitude = 1.0
diff --git a/laboneq/compiler/code_generator/wave_compressor.py b/laboneq/compiler/code_generator/wave_compressor.py
index a80e7e6..e4da27a 100644
--- a/laboneq/compiler/code_generator/wave_compressor.py
+++ b/laboneq/compiler/code_generator/wave_compressor.py
@@ -3,7 +3,7 @@
 from dataclasses import dataclass
 from itertools import groupby
-from typing import Dict, List, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union
 
 import numpy as np
 
@@ -148,8 +148,10 @@
     def _compress_wave_general(
         self,
         samples: Dict[str, np.array],
         stacked_samples: np.ndarray,
+        compressible_segments: List[Tuple[int, int]],
         num_sample_channles: int,
         num_frames: int,
+        num_samples: int,
         sample_multiple: int,
     ) -> Union[List[Union[PlayHold, PlaySamples]], None]:
         last_vals = np.zeros((num_sample_channles, num_frames))
@@ -157,12 +159,21 @@
             _, hi = self._get_frame_idx(samples, sample_multiple, i, num_frames)
             last_vals[:, i] = stacked_samples[:, hi - 1]
 
+        compressible_frames = []
+        for seg_lo, seg_hi in compressible_segments:
+            frame_lo = seg_lo // sample_multiple + 1
+            frame_hi = (
+                num_frames if seg_hi == num_samples else seg_hi // sample_multiple - 1
+            )
+            compressible_frames.append((frame_lo, frame_hi))
+
         can_compress = [False] * num_frames
-        for i in range(1, num_frames):
-            lo, hi = self._get_frame_idx(samples, sample_multiple, i, num_frames)
-            can_compress[i] = self._stacked_samples_constant(
-                stacked_samples, lo, hi
-            ) and np.all(last_vals[:, i - 1] == stacked_samples[:, lo])
+        for frame_lo, frame_hi in compressible_frames:
+            for i in range(frame_lo, frame_hi):
+                lo, hi = self._get_frame_idx(samples, sample_multiple, i, num_frames)
+                can_compress[i] = self._stacked_samples_constant(
+                    stacked_samples, lo, hi
+                ) and np.all(last_vals[:, i - 1] == stacked_samples[:, lo])
 
         if not any(can_compress):
             return None
@@ -201,23 +212,54 @@
         return events
 
     def compress_wave(
-        self, samples: Dict[str, np.array], sample_multiple: int
+        self,
+        samples: Dict[str, np.array],
+        sample_multiple: int,
+        compressible_segments: Optional[List[Tuple[int, int]]] = None,
     ) -> Union[List[Union[PlayHold, PlaySamples]], None]:
         ref_length = len(list(samples.values())[0])
         num_sample_channles = len(list(samples.values()))
         if not all(len(v) == ref_length for v in samples.values()):
             raise ValueError("All sample arrays must have the same length")
         num_frames = int(ref_length / sample_multiple)
+        num_samples = ref_length
+
+        compressible_segments = (
+            [(0, num_samples)]
+            if compressible_segments is None
+            else compressible_segments
+        )
         stacked_samples = np.array(list(samples.values()))
-        runs = self._runs_longer_than_threshold(stacked_samples, 32)
+        if len(compressible_segments) > 1:
+            return self._compress_wave_general(
+                samples,
+                stacked_samples,
+                compressible_segments,
+                num_sample_channles,
+                num_frames,
+                num_samples,
+                sample_multiple,
+            )
+
+        compr_start, compr_end = compressible_segments[0]
+        runs = self._runs_longer_than_threshold(
+            stacked_samples[:, compr_start:compr_end], 32
+        )
         if len(runs) == 0:
             return None
         if len(runs) == 1:
+            runs = [(run[0] + compr_start, run[1] + compr_start) for run in runs]
             return
self._compress_wave_simple( samples, sample_multiple, ref_length, runs[0] ) return self._compress_wave_general( - samples, stacked_samples, num_sample_channles, num_frames, sample_multiple + samples, + stacked_samples, + compressible_segments, + num_sample_channles, + num_frames, + num_samples, + sample_multiple, ) diff --git a/laboneq/compiler/common/signal_obj.py b/laboneq/compiler/common/signal_obj.py index 61217b6..d971c6a 100644 --- a/laboneq/compiler/common/signal_obj.py +++ b/laboneq/compiler/common/signal_obj.py @@ -6,11 +6,11 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING, List, Optional +from laboneq.compiler.common.awg_signal_type import AWGSignalType from laboneq.core.types.enums.mixer_type import MixerType if TYPE_CHECKING: from laboneq.compiler.common.awg_info import AWGInfo - from laboneq.compiler.common.device_type import DeviceType @dataclass(init=True, repr=True, order=True) @@ -38,12 +38,9 @@ class SignalObj: """ id: str - sampling_rate: float start_delay: float delay_signal: float - signal_type: str - device_id: str - device_type: DeviceType + signal_type: AWGSignalType | str base_delay_signal: Optional[float] = None oscillator_frequency: float = None # for software modulation only pulses: List = field(default_factory=list) diff --git a/laboneq/compiler/experiment_access/dsl_loader.py b/laboneq/compiler/experiment_access/dsl_loader.py index 0c06f2c..edb0d76 100644 --- a/laboneq/compiler/experiment_access/dsl_loader.py +++ b/laboneq/compiler/experiment_access/dsl_loader.py @@ -7,6 +7,7 @@ import logging import typing import uuid +from dataclasses import dataclass from numbers import Number from types import SimpleNamespace from typing import Any, Callable, Dict, Tuple @@ -29,6 +30,7 @@ if typing.TYPE_CHECKING: from laboneq.dsl.device import DeviceSetup from laboneq.dsl.device.io_units import LogicalSignal + from laboneq.dsl.device.ports import Port from laboneq.dsl.experiment import Experiment, ExperimentSignal from laboneq.dsl.parameter import Parameter @@ -54,7 +56,9 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): self.add_server(server.uid, server.host, server.port, server.api_level) dest_path_devices = {} - ppc_connections = {} + + # signal -> (device_uid, channel) + ppc_connections: dict[str, tuple[str, int]] = {} reference_clock = None for device in device_setup.instruments: @@ -156,23 +160,24 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): ls_map[ls.path] = ls mapped_logical_signals: Dict["LogicalSignal", "ExperimentSignal"] = {} + experiment_signals_by_physical_channel = {} for signal in experiment.signals.values(): # Need to create copy here as we'll possibly patch those ExperimentSignals # that touch the same PhysicalChannel try: - mapped_logical_signals[ - ls_map[signal.mapped_logical_signal_path] - ] = copy.deepcopy(signal) + mapped_ls = ls_map[signal.mapped_logical_signal_path] except KeyError: raise LabOneQException( f"Experiment signal '{signal.uid}' has no mapping to a logical signal." 
) - - experiment_signals_by_physical_channel = {} - for ls, exp_signal in mapped_logical_signals.items(): + sig_copy = copy.deepcopy(signal) + mapped_logical_signals[mapped_ls] = sig_copy experiment_signals_by_physical_channel.setdefault( - ls.physical_channel, [] - ).append(exp_signal) + mapped_ls.physical_channel, [] + ) + experiment_signals_by_physical_channel[mapped_ls.physical_channel].append( + sig_copy + ) from laboneq.dsl.device.io_units.physical_channel import ( PHYSICAL_CHANNEL_CALIBRATION_FIELDS, @@ -184,14 +189,19 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): for field_ in PHYSICAL_CHANNEL_CALIBRATION_FIELDS: if field_ in ["mixer_calibration", "precompensation"]: continue - values = set() + unique_value = None + conflicting = False for exp_signal in exp_signals: if not exp_signal.is_calibrated(): continue value = getattr(exp_signal, field_) if value is not None: - values.add(value) - if len(values) > 1: + if unique_value is None: + unique_value = value + elif unique_value != value: + conflicting = True + break + if conflicting: conflicting_signals = [ exp_signal.uid for exp_signal in exp_signals @@ -203,12 +213,11 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): f"touch physical channel '{pc.uid}', but provide conflicting " f"settings for calibration field '{field_}'." ) - if len(values) > 0: + if unique_value is not None: # Make sure all the experiment signals agree. - value = values.pop() for exp_signal in exp_signals: if exp_signal.is_calibrated(): - setattr(exp_signal.calibration, field_, value) + setattr(exp_signal.calibration, field_, unique_value) for ls in all_logical_signals: calibration = ls.calibration @@ -368,8 +377,9 @@ def opt_param(val: float | Parameter | None) -> float | str | None: ) else: ls_amplifier_pumps[ls.path] = ( - *ppc_connections[ls.path], + ppc_connections[ls.path][0], { + "channel": ppc_connections[ls.path][1], "pump_freq": opt_param(amp_pump.pump_freq), "pump_power": opt_param(amp_pump.pump_power), "cancellation": amp_pump.cancellation, @@ -479,46 +489,40 @@ def opt_param(val: float | Parameter | None) -> float | str | None: }, ) - open_inputs = {} - for instrument in device_setup.instruments: - for input_obj in instrument.ports: - if input_obj.direction == IODirection.IN: - open_inputs[ - (instrument.uid, input_obj.signal_type) - ] = input_obj.connector_labels - - syncing_connections = [] - for instrument in device_setup.instruments: - for connection in instrument.connections: - open_input_found = open_inputs.get( - (connection.remote_path, connection.signal_type) - ) - output = instrument.output_by_uid(connection.local_port) - - if open_input_found is not None: - syncing_connections.append( - ( - instrument.uid, - connection.remote_path, - connection.signal_type, - open_input_found, - output, - ) - ) - - for syncing_connection in syncing_connections: - signal_type = syncing_connection[2] - assert isinstance(syncing_connection[2], type(IOSignalType.DIO)) - if signal_type == IOSignalType.DIO: - dio_leader = syncing_connection[0] - dio_follower = syncing_connection[1] - self._dios.append((dio_leader, dio_follower)) + available_inputs = { + (instrument.uid, input_obj.signal_type) + for instrument in device_setup.instruments + for input_obj in instrument.ports + if input_obj.direction == IODirection.IN + } + + @dataclass + class _SyncingConnection: + leader_device_uid: str + follower_device_uid: str + signal_type: IOSignalType + output: Port | None + + syncing_connections: list[_SyncingConnection] = [ + 
_SyncingConnection( + leader_device_uid=instrument.uid, + follower_device_uid=connection.remote_path, + signal_type=connection.signal_type, + output=instrument.output_by_uid(connection.local_port), + ) + for instrument in device_setup.instruments + for connection in instrument.connections + if (connection.remote_path, connection.signal_type) in available_inputs + ] - elif signal_type == IOSignalType.ZSYNC: - zsync_leader = syncing_connection[0] - zsync_follower = syncing_connection[1] - port = syncing_connection[4].physical_port_ids[0] - self._pqsc_ports.append((zsync_leader, zsync_follower, int(port))) + for sc in syncing_connections: + if sc.signal_type == IOSignalType.DIO: + self.dios.append((sc.leader_device_uid, sc.follower_device_uid)) + elif sc.signal_type == IOSignalType.ZSYNC: + port = int(sc.output.physical_port_ids[0]) + self.pqsc_ports.append( + (sc.leader_device_uid, sc.follower_device_uid, port) + ) seq_avg_section, sweep_sections = find_sequential_averaging(experiment) if seq_avg_section is not None and len(sweep_sections) > 0: @@ -628,13 +632,9 @@ def _insert_section( has_repeat = False count = 1 - averaging_type = None if hasattr(section, "count"): has_repeat = True count = section.count - if hasattr(section, "averaging_mode"): - if section.averaging_mode.value in ["cyclic", "sequential"]: - averaging_type = "hardware" if hasattr(section, "parameters"): for parameter in section.parameters: @@ -730,7 +730,6 @@ def _insert_section( execution_type=execution_type, count=count, acquisition_types=acquisition_types, - averaging_type=averaging_type, align=align, on_system_grid=on_system_grid, length=length, diff --git a/laboneq/compiler/experiment_access/experiment_dao.py b/laboneq/compiler/experiment_access/experiment_dao.py index e79f0ff..8cce03e 100644 --- a/laboneq/compiler/experiment_access/experiment_dao.py +++ b/laboneq/compiler/experiment_access/experiment_dao.py @@ -27,12 +27,16 @@ class ExperimentDAO: def __init__(self, experiment, core_device_setup=None, core_experiment=None): - self._data = {} + self._data: dict[str, Any] = {} self._acquisition_type: AcquisitionType = None # type: ignore + if core_device_setup is not None and core_experiment is not None: - self._load_from_core(core_device_setup, core_experiment) + self._loader = self._load_from_core(core_device_setup, core_experiment) else: - self._load_experiment(experiment) + self._loader = self._load_experiment(experiment) + self._data = self._loader.data() + self._acquisition_type = self._loader.acquisition_type + self.validate_experiment() def __eq__(self, other): @@ -72,7 +76,7 @@ def add_signal( "amplifier_pump": None, } - def _load_experiment(self, experiment): + def _load_experiment(self, experiment) -> JsonLoader: loader = JsonLoader() try: validator = loader.schema_validator() @@ -82,14 +86,12 @@ def _load_experiment(self, experiment): for line in str(exception).splitlines(): _logger.warning("validation error: %s", line) loader.load(experiment) - self._data = loader.data() - self._acquisition_type = loader.acquisition_type + return loader - def _load_from_core(self, device_setup, experiment): + def _load_from_core(self, device_setup, experiment) -> DSLLoader: loader = DSLLoader() loader.load(experiment, device_setup) - self._data = loader.data() - self._acquisition_type = loader.acquisition_type + return loader @staticmethod def dump(experiment_dao: "ExperimentDAO"): @@ -278,30 +280,27 @@ def section_parent(self, section_id): def is_branch(self, section_id): return self._data["sections"][section_id].state is 
not None - def pqscs(self): - return [p[0] for p in self._data["pqsc_ports"]] + def pqscs(self) -> list[str]: + return list({p[0] for p in self._loader.pqsc_ports}) - def pqsc_ports(self, pqsc_device_id): + def pqsc_ports(self, pqsc_device_uid: str): return [ {"device": p[1], "port": p[2]} - for p in self._data["pqsc_ports"] - if p[0] == pqsc_device_id + for p in self._loader.pqsc_ports + if p[0] == pqsc_device_uid ] - def dio_followers(self): - return [d[1] for d in self._data["dios"]] + def dio_followers(self) -> list[str]: + return [d[1] for d in self._loader.dios] - def dio_leader(self, device_id): + def dio_leader(self, device_id) -> str | None: try: - return next(d[0] for d in self._data["dios"] if d[1] == device_id) + return next(d[0] for d in self._loader.dios if d[1] == device_id) except StopIteration: return None - def dio_connections(self): - return [(dio[0], dio[1]) for dio in self._data["dios"]] - - def is_dio_leader(self, device_id): - return bool({d[1] for d in self._data["dios"] if d[0] == device_id}) + def dio_connections(self) -> list[tuple[str, str]]: + return self._loader.dios def section_signals(self, section_id): return self._data["section_signals"].get(section_id, set()) @@ -389,7 +388,7 @@ def threshold(self, signal_id): def amplitude(self, signal_id) -> float | str | None: return self._data["signal_connections"][signal_id]["amplitude"] - def amplifier_pump(self, signal_id) -> tuple[str, int, dict[str, Any]] | None: + def amplifier_pump(self, signal_id) -> tuple[str, dict[str, Any]] | None: return self._data["signal_connections"][signal_id]["amplifier_pump"] def section_pulses(self, section_id, signal_id): diff --git a/laboneq/compiler/experiment_access/json_dumper.py b/laboneq/compiler/experiment_access/json_dumper.py index d077c72..f2bd72b 100644 --- a/laboneq/compiler/experiment_access/json_dumper.py +++ b/laboneq/compiler/experiment_access/json_dumper.py @@ -237,8 +237,6 @@ def dump(experiment_dao: ExperimentDAO): "execution_type": section_info.execution_type, "count": section_info.count, } - if section_info.averaging_type is not None: - out_section["repeat"]["averaging_type"] = section_info.averaging_type section_parameters = experiment_dao.section_parameters(section_id) if len(section_parameters) > 0: diff --git a/laboneq/compiler/experiment_access/json_loader.py b/laboneq/compiler/experiment_access/json_loader.py index 14c05cc..63b66d9 100644 --- a/laboneq/compiler/experiment_access/json_loader.py +++ b/laboneq/compiler/experiment_access/json_loader.py @@ -127,7 +127,7 @@ def _load_connectivity(self, experiment): if "connectivity" in experiment: if "dios" in experiment["connectivity"]: for dio in experiment["connectivity"]["dios"]: - self._dios.append((dio["leader"]["$ref"], dio["follower"]["$ref"])) + self.dios.append((dio["leader"]["$ref"], dio["follower"]["$ref"])) if "leader" in experiment["connectivity"]: leader_device_id = experiment["connectivity"]["leader"]["$ref"] @@ -145,7 +145,7 @@ def _load_connectivity(self, experiment): pqsc_device_id = pqsc["device"]["$ref"] if "ports" in pqsc: for port in pqsc["ports"]: - self._pqsc_ports.append( + self.pqsc_ports.append( (pqsc_device_id, port["device"]["$ref"], port["port"]) ) @@ -274,13 +274,10 @@ def _load_sections(self, experiment): execution_type = None length = None count: int = 1 - averaging_type = None if "repeat" in section: has_repeat = True execution_type = section["repeat"]["execution_type"] - if "averaging_type" in section["repeat"]: - averaging_type = section["repeat"]["averaging_type"] count = 
int(section["repeat"]["count"]) if "parameters" in section["repeat"]: @@ -361,7 +358,6 @@ def _load_sections(self, experiment): execution_type=execution_type, count=count, acquisition_types=acquisition_types, - averaging_type=averaging_type, align=align, on_system_grid=on_system_grid, length=length, diff --git a/laboneq/compiler/experiment_access/loader_base.py b/laboneq/compiler/experiment_access/loader_base.py index e402a33..d1ca465 100644 --- a/laboneq/compiler/experiment_access/loader_base.py +++ b/laboneq/compiler/experiment_access/loader_base.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 import logging -from typing import Optional +from typing import Any, Optional from laboneq.compiler.experiment_access.pulse_def import PulseDef from laboneq.compiler.experiment_access.section_info import SectionInfo @@ -17,11 +17,15 @@ class LoaderBase: def __init__(self): self.acquisition_type: Optional[AcquisitionType] = None + # leader_uid, follower_uid, port + self.pqsc_ports: list[tuple[str, str, int]] = [] + + # leader_uid, follower_uid + self.dios: list[tuple[str, str]] = [] + self._devices = {} self._device_oscillators = {} - self._dios = [] self._oscillators = {} - self._pqsc_ports = [] self._pulses = {} self._sections = {} self._section_parameters = {} @@ -37,13 +41,11 @@ def __init__(self): self._root_sections = set() self._handle_acquires = {} - def data(self): + def data(self) -> dict[str, Any]: return { "devices": self._devices, "device_oscillators": self._device_oscillators, - "dios": self._dios, "oscillators": self._oscillators, - "pqsc_ports": self._pqsc_ports, "pulses": self._pulses, "root_sections": self._root_sections, "sections": self._sections, diff --git a/laboneq/compiler/experiment_access/section_info.py b/laboneq/compiler/experiment_access/section_info.py index f23051d..6a24108 100644 --- a/laboneq/compiler/experiment_access/section_info.py +++ b/laboneq/compiler/experiment_access/section_info.py @@ -13,7 +13,6 @@ class SectionInfo: has_repeat: bool execution_type: Optional[str] acquisition_types: Optional[List[str]] - averaging_type: Optional[str] count: int align: Optional[str] on_system_grid: bool diff --git a/laboneq/compiler/scheduler/match_schedule.py b/laboneq/compiler/scheduler/match_schedule.py index f4a5b5b..2c8182d 100644 --- a/laboneq/compiler/scheduler/match_schedule.py +++ b/laboneq/compiler/scheduler/match_schedule.py @@ -104,8 +104,8 @@ def _compute_start_with_latency( qa_signal_obj = schedule_data.signal_objects[acquire_pulse.pulse.signal_id] - qa_device_type = qa_signal_obj.device_type - qa_sampling_rate = qa_signal_obj.sampling_rate + qa_device_type = qa_signal_obj.awg.device_type + qa_sampling_rate = qa_signal_obj.awg.sampling_rate if qa_signal_obj.is_qc: toolkit_qatype = QAType.SHFQC @@ -151,7 +151,7 @@ def _compute_start_with_latency( for signal in signals: sg_signal_obj = schedule_data.signal_objects[signal] - sg_device_type = sg_signal_obj.device_type + sg_device_type = sg_signal_obj.awg.device_type if sg_signal_obj.is_qc: toolkit_sgtype = SGType.SHFQC else: @@ -183,7 +183,7 @@ def _compute_start_with_latency( time_of_pulse_played = time_of_arrival_at_register + EXECUTETABLEENTRY_LATENCY sg_seq_rate = schedule_data.sampling_rate_tracker.sequencer_rate_for_device( - sg_signal_obj.device_id + sg_signal_obj.awg.device_id ) sg_seq_dt_for_latency_in_ts = round( 1 / (2 * sg_seq_rate * schedule_data.TINYSAMPLE) diff --git a/laboneq/compiler/scheduler/scheduler.py b/laboneq/compiler/scheduler/scheduler.py index a60a8e9..18553a0 100644 --- 
a/laboneq/compiler/scheduler/scheduler.py +++ b/laboneq/compiler/scheduler/scheduler.py @@ -88,8 +88,6 @@ def __init__( experiment_dao: ExperimentDAO, sampling_rate_tracker: SamplingRateTracker, signal_objects: Dict[str, SignalObj], - # For compatibility with old scheduler, remove once we remove that - _clock_settings: Optional[Dict] = None, settings: Optional[CompilerSettings] = None, ): self._schedule_data = ScheduleData( @@ -419,10 +417,10 @@ def _schedule_phase_reset( signals: FrozenSet[str], hw_signals: FrozenSet[str], ) -> List[PhaseResetSchedule]: - reset_sw_oscillators = ( - len(hw_signals) > 0 - or self._experiment_dao.section_info(section_id).averaging_type - == "hardware" + section_info = self._experiment_dao.section_info(section_id) + reset_sw_oscillators = len(hw_signals) > 0 or ( + section_info.execution_type == "hardware" + and section_info.averaging_mode is not None ) if not reset_sw_oscillators and len(hw_signals) == 0: diff --git a/laboneq/compiler/workflow/compiler.py b/laboneq/compiler/workflow/compiler.py index 4522d00..f99c19a 100644 --- a/laboneq/compiler/workflow/compiler.py +++ b/laboneq/compiler/workflow/compiler.py @@ -13,7 +13,6 @@ from sortedcollections import SortedDict from laboneq._observability.tracing import trace -from laboneq.compiler.code_generator import CodeGenerator from laboneq.compiler.code_generator.measurement_calculator import ( IntegrationTimes, SignalDelays, @@ -37,6 +36,10 @@ precompensation_is_nonzero, verify_precompensation_parameters, ) +from laboneq.compiler.workflow.realtime_compiler import ( + RealtimeCompiler, + RealtimeCompilerOutput, +) from laboneq.compiler.workflow.recipe_generator import RecipeGenerator from laboneq.core.exceptions import LabOneQException from laboneq.core.types.compiled_experiment import CompiledExperiment @@ -48,7 +51,7 @@ @dataclass class LeaderProperties: - global_leader: str = None + global_leader: str | None = None is_desktop_setup: bool = False internal_followers: List[str] = field(default_factory=list) @@ -64,6 +67,7 @@ def __init__(self, settings: Optional[Dict] = None): self._settings = compiler_settings.from_dict(settings) self._sampling_rate_tracker: SamplingRateTracker = None self._scheduler: Scheduler = None + self._rt_compiler_output: RealtimeCompilerOutput = None self._leader_properties = LeaderProperties() self._clock_settings: Dict[str, Any] = {} @@ -213,7 +217,7 @@ def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: self._clock_settings["use_2GHz_for_HDAWG"] = has_shf self._leader_properties.global_leader = leader - def _process_experiment(self, experiment): + def _process_experiment(self): self._calc_osc_numbering() self._calc_awgs() self._calc_shfqa_generator_allocation() @@ -224,22 +228,15 @@ def _process_experiment(self, experiment): self._calc_integration_unit_allocation() self._precompensations = self._calc_precompensations() self._signal_objects = self._generate_signal_objects() - _logger.debug("Processing Sections:::::::") - - if not self._settings.USE_EXPERIMENTAL_SCHEDULER: - _logger.warning( - "The legacy scheduler has been removed; " - "the 'USE_EXPERIMENTAL_SCHEDULER' compiler flag is ignored." 
- ) - self._scheduler = Scheduler( + rt_compiler = RealtimeCompiler( self._experiment_dao, self._sampling_rate_tracker, self._signal_objects, - self._clock_settings, self._settings, ) - self._scheduler.run() + + self._rt_compiler_output = rt_compiler.run() @staticmethod def _get_total_rounded_delay(delay, signal_id, device_type, sampling_rate): @@ -265,29 +262,6 @@ def _get_total_rounded_delay(delay, signal_id, device_type, sampling_rate): ) return delay_rounded - @trace("compiler.generate-code()") - def _generate_code(self): - code_generator = CodeGenerator(self._settings) - self._code_generator = code_generator - - for signal_obj in self._signal_objects.values(): - code_generator.add_signal(signal_obj) - - _logger.debug("Preparing events for code generator") - events = self._scheduler.event_timing(expand_loops=False) - - code_generator.gen_acquire_map(events, self._experiment_dao) - code_generator.gen_seq_c( - events, - {k: self._experiment_dao.pulse(k) for k in self._experiment_dao.pulses()}, - ) - self._command_table_match_offsets = code_generator.command_table_match_offsets() - self._feedback_connections = code_generator.feedback_connections() - self._feedback_registers = code_generator.feedback_registers() - code_generator.gen_waves() - - _logger.debug("Code generation completed") - def _calc_osc_numbering(self): self._osc_numbering = {} @@ -596,13 +570,10 @@ class DelayInfo: signal_obj = SignalObj( id=signal_id, - sampling_rate=sampling_rate, start_delay=start_delay, delay_signal=delay_signal, signal_type=signal_type, - device_id=device_id, awg=awg, - device_type=device_type, oscillator_frequency=oscillator_frequency, channels=channels, port_delay=port_delay, @@ -950,7 +921,7 @@ def _generate_recipe(self): self._experiment_dao, self._leader_properties, self._clock_settings ) - for output in self.calc_outputs(self._code_generator.signal_delays()): + for output in self.calc_outputs(self._rt_compiler_output.signal_delays): _logger.debug("Adding output %s", output) recipe_generator.add_output( output["device_id"], @@ -972,7 +943,7 @@ def _generate_recipe(self): amplitude=output["amplitude"], ) - for input in self.calc_inputs(self._code_generator.signal_delays()): + for input in self.calc_inputs(self._rt_compiler_output.signal_delays): _logger.debug("Adding input %s", input) recipe_generator.add_input( input["device_id"], @@ -1000,7 +971,7 @@ def _generate_recipe(self): # is used via a match/state construct for the drive signals of this awg qa_signal_ids = { h.acquire - for h in self._feedback_connections.values() + for h in self._rt_compiler_output.feedback_connections.values() if h.drive.intersection(awg_signals) } if len(qa_signal_ids) > 1: @@ -1012,39 +983,46 @@ def _generate_recipe(self): device_id=device_id, awg_number=awg.awg_number, signal_type=signal_type.value, - seqc=awg.seqc, qa_signal_id=next(iter(qa_signal_ids), None), - command_table_match_offset=self._command_table_match_offsets.get( + command_table_match_offset=self._rt_compiler_output.command_table_match_offsets.get( + awg.key + ), + feedback_register=self._rt_compiler_output.feedback_registers.get( awg.key ), - feedback_register=self._feedback_registers.get(awg.key), + ) + recipe_generator.add_realtime_step( + device_id=device_id, + awg_id=awg.awg_number, + seqc_filename=awg.seqc, + wave_indices_name="", # todo + nt_loop_indices=[], # todo ) - if self._code_generator is None: - raise Exception("Code generator not initialized") + assert self._rt_compiler_output is not None 
recipe_generator.add_oscillator_params(self._experiment_dao) recipe_generator.add_integrator_allocations( self._integration_unit_allocation, self._experiment_dao, - self._code_generator.integration_weights(), + self._rt_compiler_output.integration_weights, ) recipe_generator.add_acquire_lengths( - integration_times=self._code_generator.integration_times() + integration_times=self._rt_compiler_output.integration_times ) recipe_generator.add_measurements( self.calc_measurement_map( - integration_times=self._code_generator.integration_times() + integration_times=self._rt_compiler_output.integration_times ) ) recipe_generator.add_simultaneous_acquires( - self._code_generator.simultaneous_acquires() + self._rt_compiler_output.simultaneous_acquires ) recipe_generator.add_total_execution_time( - self._code_generator.total_execution_time() + self._rt_compiler_output.total_execution_time ) self._recipe = recipe_generator.recipe() @@ -1053,87 +1031,15 @@ def _generate_recipe(self): def compiler_output(self) -> CompiledExperiment: return CompiledExperiment( recipe=self._recipe, - src=self._code_generator.src(), - waves=self._code_generator.waves(), - wave_indices=self._code_generator.wave_indices(), - command_tables=self._code_generator.command_tables(), - schedule=self._prepare_schedule(), + src=self._rt_compiler_output.src, + waves=self._rt_compiler_output.waves, + wave_indices=self._rt_compiler_output.wave_indices, + command_tables=self._rt_compiler_output.command_tables, + schedule=self._rt_compiler_output.schedule, experiment_dict=ExperimentDAO.dump(self._experiment_dao), - pulse_map=self._code_generator.pulse_map(), - ) - - def _prepare_schedule(self): - event_list = self._scheduler.event_timing( - expand_loops=self._settings.EXPAND_LOOPS_FOR_SCHEDULE, - max_events=self._settings.MAX_EVENTS_TO_PUBLISH, + pulse_map=self._rt_compiler_output.pulse_map, ) - event_list = [ - {k: v for k, v in event.items() if v is not None} for event in event_list - ] - - try: - root_section = self._experiment_dao.root_rt_sections()[0] - except IndexError: - return { - "event_list": [], - "section_graph": {}, - "section_info": {}, - "subsection_map": {}, - "section_signals_with_children": {}, - "sampling_rates": [], - } - - preorder_map = self._scheduler.preorder_map() - - section_info_out = {} - - section_signals_with_children = {} - - for section in [ - root_section, - *self._experiment_dao.all_section_children(root_section), - ]: - section_info = self._experiment_dao.section_info(section) - section_display_name = section_info.section_display_name - section_signals_with_children[section] = list( - self._experiment_dao.section_signals_with_children(section) - ) - section_info_out[section] = { - "section_display_name": section_display_name, - "preorder": preorder_map[section], - } - - sampling_rate_tuples = [] - for signal_id in self._experiment_dao.signals(): - signal_info = self._experiment_dao.signal_info(signal_id) - device_id = signal_info.device_id - device_type = signal_info.device_type - sampling_rate_tuples.append( - ( - device_type, - int( - self._sampling_rate_tracker.sampling_rate_for_device(device_id) - ), - ) - ) - - sampling_rates = [ - [list(set([d[0] for d in sampling_rate_tuples if d[1] == r])), r] - for r in set([t[1] for t in sampling_rate_tuples]) - ] - - _logger.debug("Pulse sheet generation completed") - - return { - "event_list": event_list, - "section_graph": [], # deprecated: not needed by PSV - "section_info": section_info_out, - "subsection_map": {}, # deprecated: not needed by PSV - 
"section_signals_with_children": section_signals_with_children, - "sampling_rates": sampling_rates, - } - def dump_src(self, info=False): for src in self.compiler_output().src: if info: @@ -1156,9 +1062,7 @@ def run(self, data) -> CompiledExperiment: self.use_experiment(data) self._analyze_setup() - self._process_experiment(data) - - self._generate_code() + self._process_experiment() self._generate_recipe() retval = self.compiler_output() diff --git a/laboneq/compiler/workflow/realtime_compiler.py b/laboneq/compiler/workflow/realtime_compiler.py new file mode 100644 index 0000000..b1720e8 --- /dev/null +++ b/laboneq/compiler/workflow/realtime_compiler.py @@ -0,0 +1,186 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any, Dict, List, Optional + +from laboneq._observability.tracing import trace +from laboneq.compiler import CodeGenerator, CompilerSettings +from laboneq.compiler.code_generator import IntegrationTimes +from laboneq.compiler.code_generator.measurement_calculator import SignalDelays +from laboneq.compiler.code_generator.sampled_event_handler import FeedbackConnection +from laboneq.compiler.common.awg_info import AwgKey +from laboneq.compiler.common.signal_obj import SignalObj +from laboneq.compiler.experiment_access import ExperimentDAO +from laboneq.compiler.scheduler.sampling_rate_tracker import SamplingRateTracker +from laboneq.compiler.scheduler.scheduler import Scheduler +from laboneq.core.types.compiled_experiment import PulseMapEntry + +_logger = logging.getLogger(__name__) + + +@dataclass +class RealtimeCompilerOutput: + command_table_match_offsets: Dict[AwgKey, int] + feedback_connections: Dict[str, FeedbackConnection] + feedback_registers: Dict[AwgKey, int] + signal_delays: SignalDelays + integration_weights: Any + integration_times: IntegrationTimes + simultaneous_acquires: Dict[float, Dict[str, str]] + total_execution_time: float + src: List[Dict[str, Any]] + waves: List[Dict[str, Any]] + wave_indices: List[Dict[str, Any]] + command_tables: List[Dict[str, Any]] + pulse_map: Dict[str, PulseMapEntry] + schedule: Dict[str, Any] + + +class RealtimeCompiler: + def __init__( + self, + experiment_dao: ExperimentDAO, + sampling_rate_tracker: SamplingRateTracker, + signal_objects: Dict[str, SignalObj], + settings: Optional[CompilerSettings] = None, + ): + self._experiment_dao = experiment_dao + self._sampling_rate_tracker = sampling_rate_tracker + self._signal_objects = signal_objects + self._settings = settings + + if not self._settings.USE_EXPERIMENTAL_SCHEDULER: + _logger.warning( + "The legacy scheduler has been removed; " + "the 'USE_EXPERIMENTAL_SCHEDULER' compiler flag is ignored." 
+ ) + + self._scheduler = Scheduler( + self._experiment_dao, + self._sampling_rate_tracker, + self._signal_objects, + self._settings, + ) + + self._code_generator = None + + @trace("compiler.generate-code()") + def _generate_code(self): + code_generator = CodeGenerator(self._settings) + self._code_generator = code_generator + + for signal_obj in self._signal_objects.values(): + code_generator.add_signal(signal_obj) + + _logger.debug("Preparing events for code generator") + events = self._scheduler.event_timing(expand_loops=False) + + code_generator.gen_acquire_map(events) + code_generator.gen_seq_c( + events, + {k: self._experiment_dao.pulse(k) for k in self._experiment_dao.pulses()}, + ) + code_generator.gen_waves() + + _logger.debug("Code generation completed") + + def run(self): + # todo: near-time parameters + self._scheduler.run() + self._generate_code() + + compiler_output = RealtimeCompilerOutput( + command_table_match_offsets=self._code_generator.command_table_match_offsets(), + feedback_connections=self._code_generator.feedback_connections(), + feedback_registers=self._code_generator.feedback_registers(), + signal_delays=self._code_generator.signal_delays(), + integration_weights=self._code_generator.integration_weights(), + integration_times=self._code_generator.integration_times(), + simultaneous_acquires=self._code_generator.simultaneous_acquires(), + total_execution_time=self._code_generator.total_execution_time(), + src=self._code_generator.src(), + waves=self._code_generator.waves(), + wave_indices=self._code_generator.wave_indices(), + command_tables=self._code_generator.command_tables(), + pulse_map=self._code_generator.pulse_map(), + schedule=self.prepare_schedule(), + ) + + return compiler_output + + def prepare_schedule(self): + event_list = self._scheduler.event_timing( + expand_loops=self._settings.EXPAND_LOOPS_FOR_SCHEDULE, + max_events=self._settings.MAX_EVENTS_TO_PUBLISH, + ) + + event_list = [ + {k: v for k, v in event.items() if v is not None} for event in event_list + ] + + try: + root_section = self._experiment_dao.root_rt_sections()[0] + except IndexError: + return { + "event_list": [], + "section_graph": {}, + "section_info": {}, + "subsection_map": {}, + "section_signals_with_children": {}, + "sampling_rates": [], + } + + preorder_map = self._scheduler.preorder_map() + + section_info_out = {} + + section_signals_with_children = {} + + for section in [ + root_section, + *self._experiment_dao.all_section_children(root_section), + ]: + section_info = self._experiment_dao.section_info(section) + section_display_name = section_info.section_display_name + section_signals_with_children[section] = list( + self._experiment_dao.section_signals_with_children(section) + ) + section_info_out[section] = { + "section_display_name": section_display_name, + "preorder": preorder_map[section], + } + + sampling_rate_tuples = [] + for signal_id in self._experiment_dao.signals(): + signal_info = self._experiment_dao.signal_info(signal_id) + device_id = signal_info.device_id + device_type = signal_info.device_type + sampling_rate_tuples.append( + ( + device_type, + int( + self._sampling_rate_tracker.sampling_rate_for_device(device_id) + ), + ) + ) + + sampling_rates = [ + [list({d[0] for d in sampling_rate_tuples if d[1] == r}), r] + for r in {t[1] for t in sampling_rate_tuples} + ] + + _logger.debug("Pulse sheet generation completed") + + return { + "event_list": event_list, + "section_graph": [], # deprecated: not needed by PSV + "section_info": section_info_out, + 
"subsection_map": {}, # deprecated: not needed by PSV + "section_signals_with_children": section_signals_with_children, + "sampling_rates": sampling_rates, + } diff --git a/laboneq/compiler/workflow/recipe_generator.py b/laboneq/compiler/workflow/recipe_generator.py index fa49f27..56c8da0 100644 --- a/laboneq/compiler/workflow/recipe_generator.py +++ b/laboneq/compiler/workflow/recipe_generator.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional from laboneq.compiler.code_generator.measurement_calculator import IntegrationTimes from laboneq.compiler.common.device_type import DeviceType @@ -28,7 +28,7 @@ def __init__(self): "unit": {"time": "s", "frequency": "Hz", "phase": "rad"}, "epsilon": {"time": 1e-12}, } - self._recipe["experiment"] = {} + self._recipe["experiment"] = {"realtime_execution_init": []} def add_oscillator_params(self, experiment_dao: ExperimentDAO): hw_oscillators = {} @@ -122,14 +122,14 @@ def add_connectivity_from_experiment( initialization["config"]["repetitions"] = 1 initialization["config"]["holdoff"] = 0 if leader_properties.is_desktop_setup: - initialization["config"]["dio_mode"] = "hdawg_leader" + initialization["config"]["triggering_mode"] = "desktop_leader" if leader_properties.is_desktop_setup: # Internal followers are followers on the same device as the leader. This # is necessary for the standalone SHFQC, where the SHFSG part does neither # appear in the PQSC device connections nor the DIO connections. for f in leader_properties.internal_followers: initialization = self._find_initialization(f) - initialization["config"]["dio_mode"] = "hdawg" + initialization["config"]["triggering_mode"] = "internal_follower" for device in experiment_dao.device_infos(): device_uid = device.id @@ -138,49 +138,31 @@ def add_connectivity_from_experiment( if reference_clock is not None: initialization["config"]["reference_clock"] = reference_clock - try: - initialization["config"]["reference_clock_source"] = clock_settings[ - device_uid - ] - except KeyError: - initialization["config"][ - "reference_clock_source" - ] = device.reference_clock_source - if device.device_type == "hdawg" and clock_settings["use_2GHz_for_HDAWG"]: initialization["config"][ "sampling_rate" ] = DeviceType.HDAWG.sampling_rate_2GHz if device.device_type == "shfppc": - ppchannels = {} + ppchannels = [] for signal in experiment_dao.signals(): amplifier_pump = experiment_dao.amplifier_pump(signal) if amplifier_pump is not None and amplifier_pump[0] == device_uid: - ppchannels[amplifier_pump[1]] = amplifier_pump[2] + ppchannels.append(amplifier_pump[1]) initialization["ppchannels"] = ppchannels for follower in experiment_dao.dio_followers(): initialization = self._find_initialization(follower) - if not leader_properties.is_desktop_setup: - initialization["config"]["dio_mode"] = "hdawg" + if leader_properties.is_desktop_setup: + initialization["config"]["triggering_mode"] = "desktop_dio_follower" else: - initialization["config"][ - "dio_mode" - ] = "dio_follower_of_hdawg_leader" + initialization["config"]["triggering_mode"] = "dio_follower" for pqsc_device_id in experiment_dao.pqscs(): - pqsc_device = self._find_initialization(pqsc_device_id) - out_ports = [] for port in experiment_dao.pqsc_ports(pqsc_device_id): follower_device_id = port["device"] - out_ports.append( - {"port": port["port"], "device_uid": follower_device_id} - ) follower_device_init = 
self._find_initialization(follower_device_id) - follower_device_init["config"]["dio_mode"] = "zsync_dio" - - pqsc_device["ports"] = out_ports + follower_device_init["config"]["triggering_mode"] = "zsync_follower" def add_output( self, @@ -266,7 +248,6 @@ def add_awg( device_id: str, awg_number: int, signal_type: str, - seqc: str, qa_signal_id: Optional[str], command_table_match_offset: Optional[int], feedback_register: Optional[int], @@ -277,7 +258,6 @@ def add_awg( initialization["awgs"] = [] awg = { "awg": awg_number, - "seqc": seqc, "signal_type": signal_type, "qa_signal_id": qa_signal_id, "command_table_match_offset": command_table_match_offset, @@ -285,6 +265,24 @@ def add_awg( } initialization["awgs"].append(awg) + def add_realtime_step( + self, + device_id: str, + awg_id: int, + seqc_filename: str, + wave_indices_name: str, + nt_loop_indices: List[int], + ): + self._recipe["experiment"]["realtime_execution_init"].append( + { + "device_id": device_id, + "awg_id": awg_id, + "seqc_ref": seqc_filename, + "wave_indices_ref": wave_indices_name, + "nt_step": {"indices": nt_loop_indices}, + } + ) + def from_experiment( self, experiment_dao: ExperimentDAO, diff --git a/laboneq/contrib/example_helpers/plotting/plot_helpers.py b/laboneq/contrib/example_helpers/plotting/plot_helpers.py index 570f1aa..b944ce3 100644 --- a/laboneq/contrib/example_helpers/plotting/plot_helpers.py +++ b/laboneq/contrib/example_helpers/plotting/plot_helpers.py @@ -38,6 +38,9 @@ def plot_simulation( yaxis_label="Amplitude", plot_width=6, plot_height=2, + save=False, + filename="filename", + filetype="svg", ): simulation = OutputSimulator(compiled_experiment) @@ -73,6 +76,7 @@ def plot_simulation( start=start_time, output_length=length, get_trigger=True, + get_marker=True, get_frequency=True, ) @@ -135,7 +139,7 @@ def plot_simulation( xs.append(my_snippet.time) y1s.append(my_snippet.trigger) - labels1.append(f"{signal} Trigger") + labels1.append(f"{signal} - Trigger") titles.append(f"{physcial_channel} - Trigger".upper()) @@ -147,6 +151,30 @@ def plot_simulation( except Exception: pass + if np.any(my_snippet.marker): + try: + if my_snippet.time is not None: + time_length = len(my_snippet.time) + + xs.append(my_snippet.time) + + y1s.append(my_snippet.marker.real) + labels1.append(f"{signal} - Marker 1") + + if np.any(my_snippet.marker.imag): + y2s.append(my_snippet.marker.imag) + labels2.append(f"{signal} - Marker 2") + else: + empty_array = np.empty((1, time_length)) + empty_array.fill(np.nan) + y2s.append(empty_array[0]) + labels2.append(None) + + titles.append(f"{physcial_channel} - {signal} - Marker".upper()) + + except Exception: + pass + fig, axes = plt.subplots( nrows=len(y1s), sharex=False, @@ -188,6 +216,8 @@ def plot_simulation( axes.grid(True) fig.tight_layout() + if save is True: + fig.savefig(f"{filename}.{filetype}", format=f"{filetype}") # fig.legend(loc="upper left") plt.show() diff --git a/laboneq/controller/controller.py b/laboneq/controller/controller.py index 4f7ecf7..38cf99a 100644 --- a/laboneq/controller/controller.py +++ b/laboneq/controller/controller.py @@ -8,11 +8,10 @@ import logging import os import time -import traceback from collections import defaultdict from copy import deepcopy from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable import numpy as np import zhinst.utils @@ -21,7 +20,6 @@ from laboneq import __version__ from laboneq._observability import tracing from 
laboneq.controller.communication import ( - CachingStrategy, DaqNodeAction, DaqNodeSetAction, DaqWrapper, @@ -31,7 +29,7 @@ from laboneq.controller.devices.device_uhfqa import DeviceUHFQA from laboneq.controller.devices.device_zi import DeviceZI from laboneq.controller.devices.zi_node_monitor import ResponseWaiter -from laboneq.controller.protected_session import ProtectedSession +from laboneq.controller.near_time_runner import NearTimeRunner from laboneq.controller.recipe_1_4_0 import * # noqa: F401, F403 from laboneq.controller.recipe_processor import ( RecipeData, @@ -43,11 +41,10 @@ make_acquired_result, make_empty_results, ) -from laboneq.controller.util import LabOneQControllerException, SweepParamsTracker +from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode from laboneq.core.utilities.replace_pulse import ReplacementType, calc_wave_replacements -from laboneq.executor.executor import ExecutorBase, LoopingMode, LoopType if TYPE_CHECKING: from laboneq.core.types import CompiledExperiment @@ -81,12 +78,27 @@ def _stop_controller(controller: "Controller"): controller.shut_down() +@dataclass +class _SeqCCompileItem: + awg_index: int + seqc_code: str | None = None + seqc_filename: str | None = None + elf: bytes | None = None + + +@dataclass +class _UploadItem: + seqc_item: _SeqCCompileItem | None + waves: list[Any] | None + command_table: dict[Any] | None + + class Controller: def __init__( self, run_parameters: ControllerRunParameters = None, device_setup: DeviceSetup = None, - user_functions: Dict[str, Callable] = None, + user_functions: dict[str, Callable] = None, ): self._run_parameters = run_parameters or ControllerRunParameters() self._devices = DeviceCollection( @@ -99,8 +111,8 @@ def __init__( # Waves which are uploaded to the devices via pulse replacements self._current_waves = [] - self._user_functions: Dict[str, Callable] = user_functions - self._nodes_from_user_functions: List[DaqNodeAction] = [] + self._user_functions: dict[str, Callable] = user_functions + self._nodes_from_user_functions: list[DaqNodeAction] = [] self._recipe_data: RecipeData = None self._session = None self._results: Results = None @@ -122,13 +134,8 @@ def _reset_to_idle_state(self): reset_nodes.extend(device.collect_reset_nodes()) batch_set(reset_nodes) - def _wait_for_conditions_to_start(self): - for initialization in self._recipe_data.initializations: - device = self._devices.find_by_uid(initialization.device_uid) - device.wait_for_conditions_to_start() - def _apply_recipe_initializations(self): - nodes_to_initialize: List[DaqNodeAction] = [] + nodes_to_initialize: list[DaqNodeAction] = [] for initialization in self._recipe_data.initializations: device = self._devices.find_by_uid(initialization.device_uid) nodes_to_initialize.extend( @@ -153,18 +160,16 @@ def _set_nodes_before_awg_program_upload(self): batch_set(nodes_to_initialize) @tracing.trace("awg-program-handler") - def _upload_awg_programs(self): - @dataclass - class UploadItem: - awg_index: int - seqc_code: str - seqc_filename: str - waves: List[Any] - command_table: Dict[Any] - elf: Optional[bytes] - + def _upload_awg_programs(self, nt_step: NtStepKey): + if any(i != 0 for i in nt_step.indices): + # Only execute for the 1st NT step + # TODO(2K): remove, once NT steps are properly passed in the recipe. 
+ # See also commented out condition on selection of realtime_execution_init + # element below + return # Mise en place: - awg_data: Dict[DeviceZI, List[UploadItem]] = defaultdict(list) + awg_data: dict[DeviceZI, list[_UploadItem]] = defaultdict(list) + compile_data: dict[DeviceZI, list[_SeqCCompileItem]] = defaultdict(list) recipe_data = self._recipe_data acquisition_type = RtExecutionInfo.get_acquisition_type( recipe_data.rt_execution_infos @@ -177,75 +182,87 @@ class UploadItem: for awg_obj in initialization.awgs: awg_index = awg_obj.awg - seqc_code, waves, command_table = device.prepare_seqc( - awg_obj.seqc, recipe_data.compiled + rt_exec_step = next( + ( + r + for r in recipe_data.recipe.experiment.realtime_execution_init + if r.device_id == initialization.device_uid + and r.awg_id == awg_obj.awg + # and r.nt_step == nt_step # TODO(2K): Enable once ready in recipe + ), + None, ) - awg_data[device].append( - UploadItem( - awg_index, seqc_code, awg_obj.seqc, waves, command_table, None - ) + if rt_exec_step is None: + continue + + seqc_code = device.prepare_seqc( + recipe_data.compiled, rt_exec_step.seqc_ref + ) + # TODO(2K): rt_exec_step.wave_indices_ref instead of seqc_ref + waves = device.prepare_waves( + recipe_data.compiled, rt_exec_step.seqc_ref + ) + # TODO(2K): rt_exec_step.ct_ref instead of seqc_ref + command_table = device.prepare_command_table( + recipe_data.compiled, rt_exec_step.seqc_ref ) - # Compile in parallel: - def worker(device: DeviceZI, item: UploadItem, span: tracing.Span): - with tracing.get_tracer().start_span("compile-awg-thread", span) as _: - item.elf = device.compile_seqc( - item.seqc_code, item.awg_index, item.seqc_filename + seqc_item = _SeqCCompileItem( + awg_index=awg_index, ) - _logger.debug("Started compilation of AWG programs...") - with tracing.get_tracer().start_span("compile-awg-programs") as awg_span: - max_workers = os.environ.get("LABONEQ_AWG_COMPILER_MAX_WORKERS") - max_workers = int(max_workers) if max_workers is not None else None - with concurrent.futures.ThreadPoolExecutor( - max_workers=max_workers - ) as executor: - futures = [ - executor.submit(worker, device, item, awg_span) - for device, items in awg_data.items() - for item in items - ] - concurrent.futures.wait(futures) - exceptions = [ - future.exception() - for future in futures - if future.exception() is not None - ] - if len(exceptions) > 0: - raise LabOneQControllerException( - "Compilation failed. See log output for details." 
+ if seqc_code is not None: + seqc_item.seqc_code = seqc_code + seqc_item.seqc_filename = rt_exec_step.seqc_ref + compile_data[device].append(seqc_item) + + awg_data[device].append( + _UploadItem( + seqc_item=seqc_item, + waves=waves, + command_table=command_table, ) - _logger.debug("Finished compilation.") + ) + + self._awg_compile(compile_data) # Upload AWG programs, waveforms, and command tables: - elf_node_settings: Dict[DaqWrapper, List[DaqNodeSetAction]] = defaultdict(list) - elf_upload_conditions: Dict[DaqWrapper, Dict[str, Any]] = defaultdict(dict) - wf_node_settings: Dict[DaqWrapper, List[DaqNodeSetAction]] = defaultdict(list) + elf_node_settings: dict[DaqWrapper, list[DaqNodeSetAction]] = defaultdict(list) + elf_upload_conditions: dict[DaqWrapper, dict[str, Any]] = defaultdict(dict) + wf_node_settings: dict[DaqWrapper, list[DaqNodeSetAction]] = defaultdict(list) for device, items in awg_data.items(): for item in items: - elf_filename = item.seqc_filename.rsplit(".seqc", 1)[0] + ".elf" - set_action = device.prepare_upload_elf( - item.elf, item.awg_index, elf_filename - ) - node_settings = elf_node_settings[device.daq] - node_settings.append(set_action) + seqc_item = item.seqc_item + if seqc_item.elf is not None: + set_action = device.prepare_upload_elf( + seqc_item.elf, seqc_item.awg_index, seqc_item.seqc_filename + ) + node_settings = elf_node_settings[device.daq] + node_settings.append(set_action) + + if isinstance(device, DeviceUHFQA): + # UHFQA does not yet support upload of ELF and waveforms in + # a single transaction. + ready_node = device.get_sequencer_paths( + seqc_item.awg_index + ).ready + elf_upload_conditions[device.daq][ready_node] = 1 if isinstance(device, DeviceUHFQA): - # UHFQA does not yet support upload of ELF and waveforms in - # a single transaction. - ready_node = device.get_sequencer_paths(item.awg_index)["ready"] - elf_upload_conditions[device.daq][ready_node] = 1 - node_settings = wf_node_settings[device.daq] - - node_settings += device.prepare_upload_all_binary_waves( - item.awg_index, item.waves, acquisition_type - ) + wf_dev_nodes = wf_node_settings[device.daq] + else: + wf_dev_nodes = elf_node_settings[device.daq] + + if item.waves is not None: + wf_dev_nodes += device.prepare_upload_all_binary_waves( + seqc_item.awg_index, item.waves, acquisition_type + ) if item.command_table is not None: set_action = device.prepare_upload_command_table( - item.awg_index, item.command_table + seqc_item.awg_index, item.command_table ) - node_settings.append(set_action) + wf_dev_nodes.append(set_action) if len(elf_upload_conditions) > 0: for daq in elf_upload_conditions.keys(): @@ -269,13 +286,46 @@ def worker(device: DeviceZI, item: UploadItem, span: tracing.Span): raise LabOneQControllerException( f"AWGs not in ready state within timeout ({timeout_s} s)." 
) - + if len(wf_node_settings) > 0: _logger.debug("Started upload of waveforms...") with tracing.get_tracer().start_span("upload-waveforms") as _: for daq, nodes in wf_node_settings.items(): daq.batch_set(nodes) _logger.debug("Finished upload.") + @classmethod + def _awg_compile(cls, awg_data: dict[DeviceZI, list[_SeqCCompileItem]]): + # Compile in parallel: + def worker(device: DeviceZI, item: _SeqCCompileItem, span: tracing.Span): + with tracing.get_tracer().start_span("compile-awg-thread", span) as _: + item.elf = device.compile_seqc( + item.seqc_code, item.awg_index, item.seqc_filename + ) + + _logger.debug("Started compilation of AWG programs...") + with tracing.get_tracer().start_span("compile-awg-programs") as awg_span: + max_workers = os.environ.get("LABONEQ_AWG_COMPILER_MAX_WORKERS") + max_workers = int(max_workers) if max_workers is not None else None + with concurrent.futures.ThreadPoolExecutor( + max_workers=max_workers + ) as executor: + futures = [ + executor.submit(worker, device, item, awg_span) + for device, items in awg_data.items() + for item in items + ] + concurrent.futures.wait(futures) + exceptions = [ + future.exception() + for future in futures + if future.exception() is not None + ] + if len(exceptions) > 0: + raise LabOneQControllerException( + "Compilation failed. See log output for details." + ) + _logger.debug("Finished compilation.") + def _set_nodes_after_awg_program_upload(self): nodes_to_initialize = [] for initialization in self._recipe_data.initializations: @@ -286,39 +336,11 @@ def _set_nodes_after_awg_program_upload(self): batch_set(nodes_to_initialize) - def _initialize_awgs(self): + def _initialize_awgs(self, nt_step: NtStepKey): self._set_nodes_before_awg_program_upload() - self._upload_awg_programs() + self._upload_awg_programs(nt_step=nt_step) self._set_nodes_after_awg_program_upload() - def _configure_leaders(self): - _logger.debug( - "Using %s as leaders.", - [d.dev_repr for _, d in self._devices.leaders], - ) - for uid, device in self._devices.leaders: - init = self._recipe_data.get_initialization_by_device_uid(uid) - if init is None: - continue - device.configure_as_leader(init) - - def _configure_followers(self): - _logger.debug( - "Using %s as followers.", - [d.dev_repr for _, d in self._devices.followers], - ) - nodes_to_configure_followers = [] - - for uid, device in self._devices.followers: - init = self._recipe_data.get_initialization_by_device_uid(uid) - if init is None: - continue - nodes_to_configure_followers.extend( - device.collect_follower_configuration_nodes(init) - ) - - batch_set(nodes_to_configure_followers) - def _configure_triggers(self): nodes_to_configure_triggers = [] @@ -338,11 +360,6 @@ def _initialize_devices(self): self._reset_to_idle_state() self._allocate_resources() self._apply_recipe_initializations() - self._initialize_awgs() - self._configure_leaders() - self._configure_followers() - self._configure_triggers() - self._wait_for_conditions_to_start() def _execute_one_step_followers(self): _logger.debug("Settings nodes to start on followers") @@ -361,8 +378,8 @@ def _execute_one_step_followers(self): ) if not response_waiter.wait_all(timeout=2): _logger.warning( - "Conditions to start RT on followers still not fulfilled after 2 seconds, " - "nonetheless trying to continue..." + "Conditions to start RT on followers still not fulfilled after 2" + " seconds, nonetheless trying to continue..." 
) # Standalone workaround: The device is triggering itself, @@ -402,8 +419,10 @@ def _wait_execution_to_stop(self, acquisition_type: AcquisitionType): ) if not response_waiter.wait_all(timeout=guarded_wait_time): _logger.warning( - "Stop conditions still not fulfilled after %f s, estimated execution time " - "was %.2f s. Continuing to the next step.", + ( + "Stop conditions still not fulfilled after %f s, estimated" + " execution time was %.2f s. Continuing to the next step." + ), guarded_wait_time, min_wait_time, ) @@ -435,8 +454,8 @@ def connect(self): def disable_outputs( self, - device_uids: List[str] = None, - logical_signals: List[str] = None, + device_uids: list[str] = None, + logical_signals: list[str] = None, unused_only: bool = False, ): self._devices.disable_outputs(device_uids, logical_signals, unused_only) @@ -474,9 +493,7 @@ def execute_compiled( self._nodes_from_user_functions = [] _logger.info("Starting near-time execution...") with tracing.get_tracer().start_span("near-time-execution"): - Controller.NearTimeExecutor(controller=self).run( - self._recipe_data.execution - ) + NearTimeRunner(controller=self).run(self._recipe_data.execution) _logger.info("Finished near-time execution.") for _, device in self._devices.all: device.check_errors() @@ -489,19 +506,16 @@ def execute_compiled( if self._run_parameters.disconnect is True: self.disconnect() - def _find_awg(self, seqc_name: str) -> Tuple[str, int]: + def _find_awg(self, seqc_name: str) -> tuple[str, int]: # TODO(2K): Do this in the recipe preprocessor, or even modify the compiled experiment - # data model - for init in self._recipe_data.initializations: - if init.awgs is None: - continue - for awg in init.awgs: - if awg.seqc == seqc_name: - return init.device_uid, awg.awg + # data model + for rt_exec_step in self._recipe_data.recipe.experiment.realtime_execution_init: + if rt_exec_step.seqc_ref == seqc_name: + return rt_exec_step.device_id, rt_exec_step.awg_id return None, None def replace_pulse( - self, pulse_uid: Union[str, Pulse], pulse_or_array: Union[npt.ArrayLike, Pulse] + self, pulse_uid: str | Pulse, pulse_or_array: npt.ArrayLike | Pulse ): """Replaces specific pulse with the new sample data on the device. @@ -519,14 +533,20 @@ def replace_pulse( ].waveforms.values(): if any([instance.can_compress for instance in waveform.instances]): _logger.error( - "Pulse replacement on pulses that allow compression not allowed. Pulse %s", + ( + "Pulse replacement on pulses that allow compression not" + " allowed. Pulse %s" + ), pulse_uid, ) return if hasattr(pulse_uid, "can_compress") and pulse_uid.can_compress: _logger.error( - "Pulse replacement on pulses that allow compression not allowed. Pulse %s", + ( + "Pulse replacement on pulses that allow compression not allowed." 
+ " Pulse %s" + ), pulse_uid.uid, ) return @@ -543,9 +563,9 @@ def replace_pulse( for a in self._recipe_data.compiled.wave_indices if a["filename"] == repl.awg_id ) - awg_wave_map: Dict[str, List[Union[int, str]]] = awg_indices["value"] + awg_wave_map: dict[str, list[int | str]] = awg_indices["value"] target_wave = awg_wave_map.get(repl.sig_string) - seqc_name = repl.awg_id[: -len("_waveindices.csv")] + ".seqc" + seqc_name = repl.awg_id awg = self._find_awg(seqc_name) device = self._devices.find_by_uid(awg[0]) @@ -604,103 +624,6 @@ def _prepare_rt_execution(self, rt_section_uid: str) -> list[DaqNodeAction]: ) return nodes_to_prepare_rt - class NearTimeExecutor(ExecutorBase): - def __init__(self, controller: Controller): - super().__init__(looping_mode=LoopingMode.EXECUTE) - self.controller = controller - self.user_set_nodes = [] - self.nt_loop_indices: list[int] = [] - self.sweep_params_tracker = SweepParamsTracker() - - def set_handler(self, path: str, value): - dev = self.controller._devices.find_by_node_path(path) - self.user_set_nodes.append( - DaqNodeSetAction( - dev._daq, path, value, caching_strategy=CachingStrategy.NO_CACHE - ) - ) - - def user_func_handler(self, func_name: str, args: Dict[str, Any]): - func = self.controller._user_functions.get(func_name) - if func is None: - raise LabOneQControllerException( - f"User function '{func_name}' is not registered." - ) - res = func(ProtectedSession(self.controller._session), **args) - user_func_results = self.controller._results.user_func_results.setdefault( - func_name, [] - ) - user_func_results.append(res) - - def set_sw_param_handler( - self, - name: str, - index: int, - value: float, - axis_name: str, - values: npt.ArrayLike, - ): - self.sweep_params_tracker.set_param(name, value) - - def for_loop_handler( - self, count: int, index: int, loop_type: LoopType, enter: bool - ): - if enter: - self.nt_loop_indices.append(index) - else: - self.nt_loop_indices.pop() - - def rt_handler( - self, - count: int, - uid: str, - averaging_mode: AveragingMode, - acquisition_type: AcquisitionType, - enter: bool, - ): - if enter: - attribute_value_tracker = ( - self.controller._recipe_data.attribute_value_tracker - ) - for param in self.sweep_params_tracker.updated_params(): - attribute_value_tracker.update( - param, self.sweep_params_tracker.get_param(param) - ) - self.sweep_params_tracker.clear_for_next_step() - - nt_sweep_nodes: list[DaqNodeAction] = [] - for device_uid, device in self.controller._devices.all: - nt_sweep_nodes.extend( - device.collect_prepare_nt_step_nodes( - attribute_value_tracker.device_view(device_uid), - self.controller._recipe_data, - ) - ) - - step_prepare_nodes = self.controller._prepare_rt_execution( - rt_section_uid=uid - ) - - batch_set([*self.user_set_nodes, *nt_sweep_nodes, *step_prepare_nodes]) - self.user_set_nodes.clear() - - for retry in range(3): # Up to 3 retries - if retry > 0: - _logger.info("Step retry %s of 3...", retry + 1) - batch_set(step_prepare_nodes) - try: - self.controller._execute_one_step(acquisition_type) - self.controller._read_one_step_results( - nt_loop_indices=self.nt_loop_indices, rt_section_uid=uid - ) - break - except LabOneQControllerException: # TODO(2K): introduce "hard" controller exceptions - self.controller._report_step_error( - nt_loop_indices=self.nt_loop_indices, - rt_section_uid=uid, - message=traceback.format_exc(), - ) - def _prepare_result_shapes(self): if self._results is None: self._results = make_empty_results() @@ -715,9 +638,9 @@ def _prepare_result_shapes(self): if 
rt_info.acquisition_type == AcquisitionType.RAW: if len(self._recipe_data.result_shapes) > 1: raise LabOneQControllerException( - f"Multiple raw acquire events with handles " + "Multiple raw acquire events with handles " f"{list(self._recipe_data.result_shapes.keys())}. " - f"Only single raw acquire per experiment allowed." + "Only single raw acquire per experiment allowed." ) signal_id = rt_info.signal_by_handle(handle) awg_config = self._recipe_data.awg_config_by_acquire_signal(signal_id) @@ -755,7 +678,7 @@ def _prepare_result_shapes(self): empty_res.data[:] = np.nan self._results.acquired_results[handle] = empty_res - def _read_one_step_results(self, nt_loop_indices: List[int], rt_section_uid: str): + def _read_one_step_results(self, nt_step: NtStepKey, rt_section_uid: str): if rt_section_uid is None: return # Old recipe-based execution - skip partial result processing rt_execution_info = self._recipe_data.rt_execution_infos[rt_section_uid] @@ -807,12 +730,10 @@ def _read_one_step_results(self, nt_loop_indices: List[int], rt_section_uid: str continue # unused entries in sparse result vector map to None handle result = self._results.acquired_results[handle] build_partial_result( - result, nt_loop_indices, raw_results, mapping, handle + result, nt_step, raw_results, mapping, handle ) - def _report_step_error( - self, nt_loop_indices: List[int], rt_section_uid: str, message: str - ): + def _report_step_error(self, nt_step: NtStepKey, rt_section_uid: str, message: str): self._results.execution_errors.append( - (deepcopy(nt_loop_indices), rt_section_uid, message) + (list(nt_step.indices), rt_section_uid, message) ) diff --git a/laboneq/controller/devices/device_collection.py b/laboneq/controller/devices/device_collection.py index 5a76a7c..ea1d842 100644 --- a/laboneq/controller/devices/device_collection.py +++ b/laboneq/controller/devices/device_collection.py @@ -86,7 +86,7 @@ def find_by_uid(self, device_uid) -> DeviceZI: return device def find_by_node_path(self, path: str) -> DeviceZI: - m = re.match(r"^/?(DEV\d+)/.+", path.upper()) + m = re.match(r"^/?(DEV[^/]+)/.+", path.upper()) if m is None: raise LabOneQControllerException( f"Path '{path}' is not referring to any device" @@ -349,14 +349,18 @@ def make_device_qualifier( f"device '{instrument.uid}'" ) to_port = f"{connection.signal_type.name}/{connection.remote_port}" - from_dev.add_downlink(from_port, to_dev_uid, to_dev) + if not to_dev.is_secondary: + from_dev.add_downlink(from_port, to_dev_uid, to_dev) to_dev.add_uplink(to_port, from_dev) # Move various device settings from device setup for instrument in self._ds.instruments: dev = self._devices[instrument.uid] - # Set clock source (external by default) + # Set the clock source (external by default) + # TODO(2K): Simplify the logic in this code snippet and the one in 'update_clock_source'. + # Currently, it adheres to the previously existing logic in the compiler, but it appears + # unnecessarily convoluted. 
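            # A minimal sketch (annotation) of the tri-state contract that the
            # `force_internal` flag below appears to follow, judging from the
            # update_clock_source() implementations elsewhere in this diff;
            # the helper name is hypothetical:
            def _sketch_clock_choice(force_internal: bool | None, default: str) -> str:
                # None: nothing declared in the device setup, keep the device
                # default (external, or ZSync for non-standalone SHF devices).
                if force_internal is None:
                    return default
                # True: internal clock explicitly requested; False: external.
                return "internal" if force_internal else "external"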
force_internal: bool | None = None if instrument.reference_clock_source is not None: force_internal = ( diff --git a/laboneq/controller/devices/device_hdawg.py b/laboneq/controller/devices/device_hdawg.py index 97ccebd..386db4d 100644 --- a/laboneq/controller/devices/device_hdawg.py +++ b/laboneq/controller/devices/device_hdawg.py @@ -28,7 +28,7 @@ Response, ) from laboneq.controller.recipe_1_4_0 import Initialization -from laboneq.controller.recipe_enums import DIOConfigType, SignalType +from laboneq.controller.recipe_enums import SignalType, TriggeringMode from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType @@ -445,11 +445,6 @@ def collect_prepare_nt_step_nodes( return nodes_to_set - def wait_for_conditions_to_start(self): - self._wait_for_node( - f"/{self.serial}/system/clocks/sampleclock/status", 0, timeout=5 - ) - def collect_awg_before_upload_nodes( self, initialization: Initialization.Data, recipe_data: RecipeData ): @@ -477,8 +472,8 @@ def collect_trigger_configuration_nodes( _logger.debug("%s: Configuring trigger configuration nodes.", self.dev_repr) nodes_to_configure_triggers = [] - dio_mode = initialization.config.dio_mode - if dio_mode == DIOConfigType.ZSYNC_DIO: + triggering_mode = initialization.config.triggering_mode + if triggering_mode == TriggeringMode.ZSYNC_FOLLOWER: _logger.debug( "%s: Configuring DIO mode: ZSync pass-through.", self.dev_repr ) @@ -538,7 +533,7 @@ def collect_trigger_configuration_nodes( ), ] ) - elif dio_mode == DIOConfigType.HDAWG_LEADER: + elif triggering_mode == TriggeringMode.DESKTOP_LEADER: nodes_to_configure_triggers.append( DaqNodeSetAction( diff --git a/laboneq/controller/devices/device_pqsc.py b/laboneq/controller/devices/device_pqsc.py index f7ccaa4..37fdf72 100644 --- a/laboneq/controller/devices/device_pqsc.py +++ b/laboneq/controller/devices/device_pqsc.py @@ -4,6 +4,7 @@ from __future__ import annotations import logging +from enum import IntEnum from typing import Any from laboneq.controller.communication import ( @@ -20,13 +21,14 @@ ) from laboneq.controller.recipe_1_4_0 import Initialization from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData -from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType _logger = logging.getLogger(__name__) -REFERENCE_CLOCK_SOURCE_INTERNAL = 0 -REFERENCE_CLOCK_SOURCE_EXTERNAL = 1 + +class ReferenceClockSourcePQSC(IntEnum): + INTERNAL = 0 + EXTERNAL = 1 class DevicePQSC(DeviceZI): @@ -46,9 +48,9 @@ def update_clock_source(self, force_internal: bool | None): def clock_source_control_nodes(self) -> list[NodeControlBase]: source = ( - REFERENCE_CLOCK_SOURCE_INTERNAL + ReferenceClockSourcePQSC.INTERNAL if self._use_internal_clock - else REFERENCE_CLOCK_SOURCE_EXTERNAL + else ReferenceClockSourcePQSC.EXTERNAL ) expected_freq = None if self._use_internal_clock else 10e6 return [ @@ -138,6 +140,35 @@ def conditions_for_execution_done( def collect_trigger_configuration_nodes( self, initialization: Initialization.Data, recipe_data: RecipeData ) -> list[DaqNodeAction]: + # TODO(2K): This was moved as is from no more existing "configure_as_leader". + # Verify, if separate `batch_set` per node is truly necessary here, or the corresponding + # nodes can be set in one batch with others. 
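        # A minimal sketch (annotation, kept commented out so the two separate
        # calls below remain the effective code) of the single-batch variant
        # the todo above asks about; whether the enable/freq nodes tolerate
        # being set in one batch is exactly the open question:
        #
        #     self._daq.batch_set(
        #         [
        #             DaqNodeSetAction(
        #                 self._daq,
        #                 f"/{self.serial}/system/clocks/referenceclock/out/enable",
        #                 1,
        #             ),
        #             DaqNodeSetAction(
        #                 self._daq,
        #                 f"/{self.serial}/system/clocks/referenceclock/out/freq",
        #                 initialization.config.reference_clock,
        #             ),
        #         ]
        #     )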
+ _logger.debug( + "%s: Setting reference clock frequency to %d MHz...", + self.dev_repr, + initialization.config.reference_clock, + ) + + self._daq.batch_set( + [ + DaqNodeSetAction( + self._daq, + f"/{self.serial}/system/clocks/referenceclock/out/enable", + 1, + ) + ] + ) + + self._daq.batch_set( + [ + DaqNodeSetAction( + self._daq, + f"/{self.serial}/system/clocks/referenceclock/out/freq", + initialization.config.reference_clock, + ) + ] + ) + # Ensure ZSync links are established # TODO(2K): This is rather a hotfix, waiting to be done in parallel for all devices with # subscription / poll @@ -182,38 +213,3 @@ def collect_trigger_configuration_nodes( ) return nodes_to_configure_triggers - - def collect_follower_configuration_nodes( - self, initialization: Initialization.Data - ) -> list[DaqNodeAction]: - raise LabOneQControllerException("PQSC cannot be configured as follower") - - def configure_as_leader(self, initialization: Initialization.Data): - _logger.debug("%s: Configuring as leader...", self.dev_repr) - _logger.debug("%s: Enabling reference clock...", self.dev_repr) - - _logger.debug( - "%s: Setting reference clock frequency to %d MHz...", - self.dev_repr, - initialization.config.reference_clock, - ) - - self._daq.batch_set( - [ - DaqNodeSetAction( - self._daq, - f"/{self.serial}/system/clocks/referenceclock/out/enable", - 1, - ) - ] - ) - - self._daq.batch_set( - [ - DaqNodeSetAction( - self._daq, - f"/{self.serial}/system/clocks/referenceclock/out/freq", - initialization.config.reference_clock, - ) - ] - ) diff --git a/laboneq/controller/devices/device_setup_dao.py b/laboneq/controller/devices/device_setup_dao.py index 569cb05..8670441 100644 --- a/laboneq/controller/devices/device_setup_dao.py +++ b/laboneq/controller/devices/device_setup_dao.py @@ -37,6 +37,15 @@ def instruments(self) -> Iterator[ZIStandardInstrument]: for instrument in self._device_setup.instruments: if isinstance(instrument, ZIStandardInstrument): yield instrument + if hasattr(instrument, "device_type"): + if instrument.device_type.name in { + "HDAWG", + "UHFQA", + "SHFQA", + "SHFQC", + "SHFSG", + }: + yield instrument @property def servers(self) -> Iterator[tuple[str, DataServer]]: diff --git a/laboneq/controller/devices/device_shf_base.py b/laboneq/controller/devices/device_shf_base.py new file mode 100644 index 0000000..0a7550e --- /dev/null +++ b/laboneq/controller/devices/device_shf_base.py @@ -0,0 +1,56 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from enum import IntEnum + +from laboneq.controller.devices.device_zi import DeviceQualifier, DeviceZI +from laboneq.controller.devices.zi_node_monitor import ( + Command, + Condition, + NodeControlBase, + Response, +) + + +class ReferenceClockSourceSHF(IntEnum): + INTERNAL = 0 + EXTERNAL = 1 + ZSYNC = 2 + + +class DeviceSHFBase(DeviceZI): + def __init__(self, device_qualifier: DeviceQualifier): + super().__init__(device_qualifier) + self._reference_clock_source = ReferenceClockSourceSHF.ZSYNC + + def update_clock_source(self, force_internal: bool | None): + if self.is_standalone() and force_internal is not False: + # Internal is the default (or explicit) for standalone SHF + self._reference_clock_source = ReferenceClockSourceSHF.INTERNAL + elif self.is_standalone() and force_internal is not True: + # External specified explicitly for standalone SHF + self._reference_clock_source = ReferenceClockSourceSHF.EXTERNAL + else: + # ZSync is the only possible source when device is not 
standalone + self._reference_clock_source = ReferenceClockSourceSHF.ZSYNC + + def clock_source_control_nodes(self) -> list[NodeControlBase]: + expected_freq = { + ReferenceClockSourceSHF.INTERNAL: None, + ReferenceClockSourceSHF.EXTERNAL: 10e6, + ReferenceClockSourceSHF.ZSYNC: 100e6, + }[self._reference_clock_source] + source = self._reference_clock_source.value + + return [ + Condition( + f"/{self.serial}/system/clocks/referenceclock/in/freq", expected_freq + ), + Command(f"/{self.serial}/system/clocks/referenceclock/in/source", source), + Response( + f"/{self.serial}/system/clocks/referenceclock/in/sourceactual", source + ), + Response(f"/{self.serial}/system/clocks/referenceclock/in/status", 0), + ] diff --git a/laboneq/controller/devices/device_shfppc.py b/laboneq/controller/devices/device_shfppc.py index 4a78cde..2f94d16 100644 --- a/laboneq/controller/devices/device_shfppc.py +++ b/laboneq/controller/devices/device_shfppc.py @@ -52,8 +52,9 @@ def pre_process_attributes( initialization: Initialization.Data, ) -> Iterator[DeviceAttribute]: yield from super().pre_process_attributes(initialization) - ppchannels = initialization.ppchannels or {} - for channel, settings in ppchannels.items(): + ppchannels = initialization.ppchannels or [] + for settings in ppchannels: + channel = settings["channel"] for key, attribute_name in DeviceSHFPPC.attribute_keys.items(): if key in settings: yield DeviceAttribute( @@ -70,19 +71,20 @@ def collect_initialization_nodes( self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data ) -> list[DaqNodeAction]: nodes_to_set: list[DaqNodeAction] = [] - ppchannels = initialization.ppchannels or {} + ppchannels = initialization.ppchannels or [] def _convert(value): if isinstance(value, bool): return 1 if value else 0 return value - for ch, settings in ppchannels.items(): + for settings in ppchannels: + ch = settings["channel"] nodes_to_set.append( DaqNodeSetAction(self._daq, self._key_to_path("_on", ch), 1) ) for key, value in settings.items(): - if value is None or key in DeviceSHFPPC.attribute_keys: + if value is None or key in [*DeviceSHFPPC.attribute_keys, "channel"]: # Skip not set values, or values that are bound to sweep params and will # be set during the NT execution. 
continue diff --git a/laboneq/controller/devices/device_shfqa.py b/laboneq/controller/devices/device_shfqa.py index 2941f9d..d3ff354 100644 --- a/laboneq/controller/devices/device_shfqa.py +++ b/laboneq/controller/devices/device_shfqa.py @@ -21,14 +21,18 @@ DaqNodeGetAction, DaqNodeSetAction, ) -from laboneq.controller.devices.device_zi import DeviceZI, delay_to_rounded_samples +from laboneq.controller.devices.device_shf_base import DeviceSHFBase +from laboneq.controller.devices.device_zi import ( + SequencerPaths, + delay_to_rounded_samples, +) from laboneq.controller.recipe_1_4_0 import ( IO, Initialization, IntegratorAllocation, Measurement, ) -from laboneq.controller.recipe_enums import DIOConfigType +from laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import ( AwgConfig, AwgKey, @@ -40,14 +44,9 @@ from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode -from laboneq.core.types.enums.reference_clock_source import ReferenceClockSource _logger = logging.getLogger(__name__) -REFERENCE_CLOCK_SOURCE_INTERNAL = 0 -REFERENCE_CLOCK_SOURCE_EXTERNAL = 1 -REFERENCE_CLOCK_SOURCE_ZSYNC = 2 - INTERNAL_TRIGGER_CHANNEL = 1024 # PQSC style triggering on the SHFSG/QC SOFTWARE_TRIGGER_CHANNEL = 8 # Software triggering on the SHFQA @@ -61,7 +60,7 @@ # maximum delay to 1 us for now -class DeviceSHFQA(DeviceZI): +class DeviceSHFQA(DeviceSHFBase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.dev_type = "SHFQA4" @@ -94,13 +93,13 @@ def _process_dev_opts(self): def _get_sequencer_type(self) -> str: return "qa" - def _get_sequencer_path_patterns(self) -> dict: - return { - "elf": "/{serial}/qachannels/{index}/generator/elf/data", - "progress": "/{serial}/qachannels/{index}/generator/elf/progress", - "enable": "/{serial}/qachannels/{index}/generator/enable", - "ready": "/{serial}/qachannels/{index}/generator/ready", - } + def get_sequencer_paths(self, index: int) -> SequencerPaths: + return SequencerPaths( + elf=f"/{self.serial}/qachannels/{index}/generator/elf/data", + progress=f"/{self.serial}/qachannels/{index}/generator/elf/progress", + enable=f"/{self.serial}/qachannels/{index}/generator/enable", + ready=f"/{self.serial}/qachannels/{index}/generator/ready", + ) def _get_num_awgs(self): return self._channels @@ -886,7 +885,7 @@ def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): for measurement in initialization.measurements: channel = 0 - if initialization.config.dio_mode == DIOConfigType.HDAWG_LEADER: + if initialization.config.triggering_mode == TriggeringMode.DESKTOP_LEADER: # standalone QA oder QC channel = ( SOFTWARE_TRIGGER_CHANNEL @@ -913,78 +912,39 @@ def collect_trigger_configuration_nodes( nodes_to_configure_triggers = [] - dio_mode = initialization.config.dio_mode + triggering_mode = initialization.config.triggering_mode - if dio_mode == DIOConfigType.ZSYNC_DIO: + if triggering_mode == TriggeringMode.ZSYNC_FOLLOWER: pass - elif dio_mode == DIOConfigType.HDAWG_LEADER: + elif triggering_mode == TriggeringMode.DESKTOP_LEADER: self._wait_for_awgs = False self._emit_trigger = True - clock_source = initialization.config.reference_clock_source - ntc = [ - ( - "system/clocks/referenceclock/in/source", - REFERENCE_CLOCK_SOURCE_INTERNAL - if clock_source - and clock_source.value == ReferenceClockSource.INTERNAL.value - else REFERENCE_CLOCK_SOURCE_EXTERNAL, - ) - 
] if self.options.is_qc: - ntc += [ - ("system/internaltrigger/enable", 0), - ("system/internaltrigger/repetitions", 1), - ] - return [ - DaqNodeSetAction(self._daq, f"/{self.serial}/{node}", v) - for node, v in ntc - ] - + int_trig_base = f"/{self.serial}/system/internaltrigger" + nodes_to_configure_triggers.append( + DaqNodeSetAction(self._daq, f"{int_trig_base}/enable", 0) + ) + nodes_to_configure_triggers.append( + DaqNodeSetAction(self._daq, f"{int_trig_base}/repetitions", 1) + ) else: raise LabOneQControllerException( - f"Unsupported DIO mode: {dio_mode} for device type SHFQA." + f"Unsupported triggering mode: {triggering_mode} for device type SHFQA." ) for awg_index in ( self._allocated_awgs if len(self._allocated_awgs) > 0 else range(1) ): - marker_path = f"/{self.serial}/qachannels/{awg_index}/markers" + markers_base = f"/{self.serial}/qachannels/{awg_index}/markers" src = 32 + awg_index nodes_to_configure_triggers.append( - DaqNodeSetAction(self._daq, f"{marker_path}/0/source", src), + DaqNodeSetAction(self._daq, f"{markers_base}/0/source", src), ) nodes_to_configure_triggers.append( - DaqNodeSetAction(self._daq, f"{marker_path}/1/source", src), + DaqNodeSetAction(self._daq, f"{markers_base}/1/source", src), ) return nodes_to_configure_triggers - def configure_as_leader(self, initialization: Initialization.Data): - raise LabOneQControllerException("SHFQA cannot be configured as leader") - - def collect_follower_configuration_nodes( - self, initialization: Initialization.Data - ) -> list[DaqNodeAction]: - dio_mode = initialization.config.dio_mode - _logger.debug("%s: Configuring as a follower...", self.dev_repr) - - nodes_to_configure_as_follower = [] - - if dio_mode == DIOConfigType.ZSYNC_DIO: - _logger.debug( - "%s: Configuring reference clock to use ZSYNC as a reference...", - self.dev_repr, - ) - self._switch_reference_clock(source=2, expected_freqs=100e6) - elif dio_mode == DIOConfigType.HDAWG_LEADER: - # standalone - pass - else: - raise LabOneQControllerException( - f"Unsupported DIO mode: {dio_mode} for device type SHFQA." 
- ) - - return nodes_to_configure_as_follower - def get_measurement_data( self, channel: int, diff --git a/laboneq/controller/devices/device_shfsg.py b/laboneq/controller/devices/device_shfsg.py index 37482bf..87f05f2 100644 --- a/laboneq/controller/devices/device_shfsg.py +++ b/laboneq/controller/devices/device_shfsg.py @@ -19,21 +19,19 @@ DaqNodeAction, DaqNodeSetAction, ) -from laboneq.controller.devices.device_zi import DeviceZI +from laboneq.controller.devices.device_shf_base import DeviceSHFBase +from laboneq.controller.devices.device_zi import SequencerPaths +from laboneq.controller.devices.zi_node_monitor import NodeControlBase from laboneq.controller.recipe_1_4_0 import IO, Initialization -from laboneq.controller.recipe_enums import DIOConfigType, ReferenceClockSource +from laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType _logger = logging.getLogger(__name__) -REFERENCE_CLOCK_SOURCE_INTERNAL = 0 -REFERENCE_CLOCK_SOURCE_EXTERNAL = 1 -REFERENCE_CLOCK_SOURCE_ZSYNC = 2 - -class DeviceSHFSG(DeviceZI): +class DeviceSHFSG(DeviceSHFBase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.dev_type = "SHFSG8" @@ -49,6 +47,10 @@ def dev_repr(self) -> str: return f"SHFQC/SG:{self.serial}" return f"SHFSG:{self.serial}" + @property + def is_secondary(self) -> bool: + return self.options.qc_with_qa + def _process_dev_opts(self): if self.dev_type == "SHFSG8": self._channels = 8 @@ -88,13 +90,13 @@ def _process_dev_opts(self): def _get_sequencer_type(self) -> str: return "sg" - def _get_sequencer_path_patterns(self) -> dict: - return { - "elf": "/{serial}/sgchannels/{index}/awg/elf/data", - "progress": "/{serial}/sgchannels/{index}/awg/elf/progress", - "enable": "/{serial}/sgchannels/{index}/awg/enable", - "ready": "/{serial}/sgchannels/{index}/awg/ready", - } + def get_sequencer_paths(self, index: int) -> SequencerPaths: + return SequencerPaths( + elf=f"/{self.serial}/sgchannels/{index}/awg/elf/data", + progress=f"/{self.serial}/sgchannels/{index}/awg/elf/progress", + enable=f"/{self.serial}/sgchannels/{index}/awg/enable", + ready=f"/{self.serial}/sgchannels/{index}/awg/ready", + ) def _get_num_awgs(self): return self._channels @@ -159,6 +161,12 @@ def _nodes_to_monitor_impl(self) -> list[str]: nodes.append(f"/{self.serial}/sgchannels/{awg}/awg/ready") return nodes + def clock_source_control_nodes(self) -> list[NodeControlBase]: + if self.is_secondary: + return [] # QA will initialize the nodes + else: + return super().clock_source_control_nodes() + def collect_execution_nodes(self): _logger.debug("Starting execution...") return [ @@ -440,7 +448,7 @@ def collect_trigger_configuration_nodes( or awg_config.qa_signal_id is None ): continue - if awg_config.source_feedback_register is None and self.options.qc_with_qa: + if awg_config.source_feedback_register is None and self.is_secondary: # local feedback ntc.extend( [ @@ -480,25 +488,20 @@ def collect_trigger_configuration_nodes( ), ] ) - dio_mode = initialization.config.dio_mode + triggering_mode = initialization.config.triggering_mode - if dio_mode == DIOConfigType.ZSYNC_DIO: + if triggering_mode == TriggeringMode.ZSYNC_FOLLOWER: pass - elif dio_mode in (DIOConfigType.HDAWG_LEADER, DIOConfigType.HDAWG): + elif triggering_mode in ( + TriggeringMode.DESKTOP_LEADER, + TriggeringMode.INTERNAL_FOLLOWER, + ): # 
standalone SHFSG or SHFQC self._wait_for_awgs = False - if not self.options.qc_with_qa: + if not self.is_secondary: # otherwise, the QA will initialize the nodes self._emit_trigger = True - clock_source = initialization.config.reference_clock_source ntc += [ - ( - "system/clocks/referenceclock/in/source", - REFERENCE_CLOCK_SOURCE_INTERNAL - if clock_source - and clock_source.value == ReferenceClockSource.INTERNAL.value - else REFERENCE_CLOCK_SOURCE_EXTERNAL, - ), ("system/internaltrigger/enable", 0), ("system/internaltrigger/repetitions", 1), ] @@ -513,7 +516,7 @@ def collect_trigger_configuration_nodes( ] else: raise LabOneQControllerException( - f"Unsupported DIO mode: {dio_mode} for device type SHFSG." + f"Unsupported triggering mode: {triggering_mode} for device type SHFSG." ) nodes_to_configure_triggers = [ @@ -531,39 +534,6 @@ def add_command_table_header(self, body: dict) -> dict: def command_table_path(self, awg_index: int) -> str: return f"/{self.serial}/sgchannels/{awg_index}/awg/commandtable/" - def configure_as_leader(self, initialization: Initialization.Data): - raise LabOneQControllerException("SHFSG cannot be configured as leader") - - def collect_follower_configuration_nodes( - self, initialization: Initialization.Data - ) -> list[DaqNodeAction]: - if self.options.qc_with_qa: - return [] # QC follower config is done over it's QA part - - dio_mode = initialization.config.dio_mode - _logger.debug("%s: Configuring as a follower...", self.dev_repr) - - nodes_to_configure_as_follower = [] - - if dio_mode == DIOConfigType.ZSYNC_DIO: - _logger.debug( - "%s: Configuring reference clock to use ZSYNC as a reference...", - self.dev_repr, - ) - self._switch_reference_clock(source=2, expected_freqs=100e6) - elif dio_mode == DIOConfigType.HDAWG_LEADER: - # standalone - pass - elif dio_mode == DIOConfigType.HDAWG: - # standalone as part of an SHFQC with active QA part - pass - else: - raise LabOneQControllerException( - f"Unsupported DIO mode: {dio_mode} for device type SHFSG." 
- ) - - return nodes_to_configure_as_follower - def collect_reset_nodes(self) -> list[DaqNodeAction]: reset_nodes = super().collect_reset_nodes() reset_nodes.append( diff --git a/laboneq/controller/devices/device_uhfqa.py b/laboneq/controller/devices/device_uhfqa.py index 4378ea8..0de3efe 100644 --- a/laboneq/controller/devices/device_uhfqa.py +++ b/laboneq/controller/devices/device_uhfqa.py @@ -21,7 +21,7 @@ from laboneq.controller.devices.device_zi import DeviceZI, delay_to_rounded_samples from laboneq.controller.devices.zi_node_monitor import Command, NodeControlBase from laboneq.controller.recipe_1_4_0 import IO, Initialization, IntegratorAllocation -from laboneq.controller.recipe_enums import DIOConfigType +from laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import ( AwgConfig, AwgKey, @@ -88,12 +88,6 @@ def _nodes_to_monitor_impl(self) -> list[str]: nodes.append(f"/{self.serial}/awgs/{awg}/ready") return nodes - def _error_as_leader(self): - raise LabOneQControllerException( - f"{self.dev_repr}: UHFQA cannot be configured as leader, ensure correct DIO " - f"connection in the device setup" - ) - def _error_ambiguous_upstream(self): raise LabOneQControllerException( f"{self.dev_repr}: Can't determine unambiguously upstream device for UHFQA, ensure " @@ -102,7 +96,10 @@ def _error_ambiguous_upstream(self): def update_clock_source(self, force_internal: bool | None): if len(self._uplinks) == 0: - self._error_as_leader() + raise LabOneQControllerException( + f"{self.dev_repr}: UHFQA cannot be configured as leader, ensure correct DIO " + f"connection in the device setup" + ) if len(self._uplinks) > 1: self._error_ambiguous_upstream() upstream = next(iter(self._uplinks.values()))() @@ -629,9 +626,9 @@ def collect_trigger_configuration_nodes( ] ) - dio_mode = initialization.config.dio_mode + triggering_mode = initialization.config.triggering_mode - if dio_mode == DIOConfigType.HDAWG or dio_mode is None: + if triggering_mode == TriggeringMode.DIO_FOLLOWER or triggering_mode is None: nodes_to_configure_triggers.extend( [ DaqNodeSetAction(self._daq, f"/{self.serial}/dios/0/mode", 2), @@ -639,7 +636,7 @@ def collect_trigger_configuration_nodes( DaqNodeSetAction(self._daq, f"/{self.serial}/dios/0/extclk", 0x2), ] ) - elif dio_mode == DIOConfigType.DIO_FOLLOWER_OF_HDAWG_LEADER: + elif triggering_mode == TriggeringMode.DESKTOP_DIO_FOLLOWER: nodes_to_configure_triggers.extend( [ DaqNodeSetAction(self._daq, f"/{self.serial}/dios/0/mode", 0), @@ -669,9 +666,6 @@ def collect_trigger_configuration_nodes( return nodes_to_configure_triggers - def configure_as_leader(self, initialization: Initialization.Data): - self._error_as_leader() - def _get_integrator_measurement_data( self, result_index, num_results, averages_divider: int ): diff --git a/laboneq/controller/devices/device_zi.py b/laboneq/controller/devices/device_zi.py index d1acf08..7e521b4 100644 --- a/laboneq/controller/devices/device_zi.py +++ b/laboneq/controller/devices/device_zi.py @@ -6,8 +6,6 @@ import json import logging import math -import os -import os.path import re import time from abc import ABC @@ -97,6 +95,14 @@ class DeviceQualifier: options: DeviceOptions = None +@dataclass +class SequencerPaths: + elf: str + progress: str + enable: str + ready: str + + def delay_to_rounded_samples( channel: int, dev_repr: str, @@ -197,6 +203,10 @@ def interface(self): def daq(self): return self._daq + @property + def is_secondary(self) -> bool: + return False + def add_command_table_header(self, body: 
dict) -> dict: # Stub, implement in sub-class _logger.debug("Command table unavailable on device %s", self.dev_repr) @@ -225,21 +235,13 @@ def _process_dev_opts(self): def _get_sequencer_type(self) -> str: return "auto-detect" - def _get_sequencer_path_patterns(self) -> dict[str, str]: - return { - "elf": "/{serial}/awgs/{index}/elf/data", - "progress": "/{serial}/awgs/{index}/elf/progress", - "enable": "/{serial}/awgs/{index}/enable", - "ready": "/{serial}/awgs/{index}/ready", - } - - def get_sequencer_paths(self, index: int) -> dict[str, str]: - props = { - "serial": self.serial, - "index": index, - } - patterns = self._get_sequencer_path_patterns() - return {k: v.format(**props) for k, v in patterns.items()} + def get_sequencer_paths(self, index: int) -> SequencerPaths: + return SequencerPaths( + elf=f"/{self.serial}/awgs/{index}/elf/data", + progress=f"/{self.serial}/awgs/{index}/elf/progress", + enable=f"/{self.serial}/awgs/{index}/enable", + ready=f"/{self.serial}/awgs/{index}/ready", + ) def add_downlink(self, port: str, linked_device_uid: str, linked_device: DeviceZI): self._downlinks[port] = (linked_device_uid, ref(linked_device)) @@ -306,14 +308,6 @@ def collect_trigger_configuration_nodes( ) -> list[DaqNodeAction]: return [] - def configure_as_leader(self, initialization: Initialization.Data): - pass - - def collect_follower_configuration_nodes( - self, initialization: Initialization.Data - ) -> list[DaqNodeAction]: - return [] - def _connect_to_data_server(self): if self._connected: return @@ -477,9 +471,6 @@ def get_measurement_data( def get_input_monitor_data(self, channel: int, num_results: int): return None # default -> no results available from the device - def wait_for_conditions_to_start(self): - pass - def conditions_for_execution_ready(self) -> dict[str, Any]: return {} @@ -545,109 +536,6 @@ def _wait_for_node( if time.time() - guard_start >= guard_time: break - def _switch_reference_clock(self, source, expected_freqs): - if expected_freqs is not None and not isinstance(expected_freqs, list): - expected_freqs = [expected_freqs] - - source_path = f"/{self.serial}/system/clocks/referenceclock/in/source" - status_path = f"/{self.serial}/system/clocks/referenceclock/in/status" - sourceactual_path = ( - f"/{self.serial}/system/clocks/referenceclock/in/sourceactual" - ) - freq_path = f"/{self.serial}/system/clocks/referenceclock/in/freq" - - self._daq.batch_set( - [ - DaqNodeSetAction( - self._daq, - source_path, - source, - caching_strategy=CachingStrategy.NO_CACHE, - ) - ] - ) - - retries = 0 - timeout = 60 # s - start_time = time.time() - last_report = start_time - sourceactual = None - status = None - freq = None - - while True: - if retries > 0: - now = time.time() - elapsed = floor(now - start_time) - if now - start_time > timeout: - raise LabOneQControllerException( - f"Unable to switch reference clock within {timeout}s. 
" - f"Requested source: {source}, actual: {sourceactual}, status: {status}, " - f"expected frequencies: {expected_freqs}, actual: {freq}" - ) - if now - last_report > 5: - _logger.debug( - "Waiting for reference clock switching, %f s remaining " - "until %f s timeout...", - timeout - elapsed, - timeout, - ) - last_report = now - time.sleep(0.1) - retries += 1 - - daq_reply = self._daq.batch_get( - [ - DaqNodeGetAction( - self._daq, - sourceactual_path, - caching_strategy=CachingStrategy.NO_CACHE, - ) - ] - ) - sourceactual = daq_reply[sourceactual_path] - if sourceactual != source and not self.dry_run: - continue - - daq_reply = self._daq.batch_get( - [ - DaqNodeGetAction( - self._daq, - status_path, - caching_strategy=CachingStrategy.NO_CACHE, - ) - ] - ) - status = daq_reply[status_path] - if not self.dry_run: - if status == 2: # still locking - continue - if status == 1: # error while locking - raise LabOneQControllerException( - f"Unable to switch reference clock, device returned error " - f"after {elapsed}s. Requested source: {source}, actual: {sourceactual}, " - f"status: {status}, expected frequency: {expected_freqs}, actual: {freq}" - ) - - if expected_freqs is None: - break - daq_reply = self._daq.batch_get( - [ - DaqNodeGetAction( - self._daq, freq_path, caching_strategy=CachingStrategy.NO_CACHE - ) - ] - ) - freq = daq_reply[freq_path] - if freq in expected_freqs or self.dry_run: - break - else: - raise LabOneQControllerException( - f"Unexpected frequency after switching the reference clock. " - f"Requested source: {source}, actual: {sourceactual}, status: {status}, " - f"expected frequency: {expected_freqs}, actual: {freq}" - ) - def _adjust_frequency(self, freq): return freq @@ -774,16 +662,13 @@ def _prepare_wave_complex(self, waves, sig: str) -> tuple[str, npt.ArrayLike]: return sig, np.array(wave["samples"], dtype=np.complex128) - def _prepare_waves( - self, compiled: CompiledExperiment, seqc_filename: str + def prepare_waves( + self, compiled: CompiledExperiment, wave_indices_ref: str ) -> list[tuple[str, npt.ArrayLike]]: - wave_indices_filename = os.path.splitext(seqc_filename)[0] + "_waveindices.csv" + if wave_indices_ref is None: + return None wave_indices: dict[str, list[int | str]] = next( - ( - i - for i in compiled.wave_indices - if i["filename"] == wave_indices_filename - ), + (i for i in compiled.wave_indices if i["filename"] == wave_indices_ref), {"value": {}}, )["value"] @@ -798,7 +683,7 @@ def _prepare_waves( waves_by_index[idx] = self._prepare_wave_complex(waves, sig) else: raise LabOneQControllerException( - f"Unexpected signal type for binary wave for '{sig}' in '{seqc_filename}' - " + f"Unexpected signal type for binary wave for '{sig}' in '{wave_indices_ref}' - " f"'{sig_type}', should be one of [iq, double, multi, single, complex]" ) @@ -809,11 +694,13 @@ def _prepare_waves( idx += 1 return bin_waves - def _prepare_command_table( - self, compiled: CompiledExperiment, seqc_filename: str + def prepare_command_table( + self, compiled: CompiledExperiment, ct_ref: str ) -> dict | None: + if ct_ref is None: + return None command_table_body = next( - (ct["ct"] for ct in compiled.command_tables if ct["seqc"] == seqc_filename), + (ct["ct"] for ct in compiled.command_tables if ct["seqc"] == ct_ref), None, ) @@ -830,28 +717,13 @@ def _prepare_command_table( return self.add_command_table_header(command_table_body) - def prepare_seqc( - self, seqc_filename: str, compiled: CompiledExperiment - ) -> tuple[str, list[tuple[str, npt.ArrayLike]], dict[Any]]: - """ - 
`compiled` expected to have the following members: - - `src` -> list[dict[str, str]] - `filename` -> `` - `text` -> `` - - `waves` -> list[dict[str, str]] - `filename` -> `` - `text` -> `` - - Returns a tuple of - 1. str: seqc text to pass to the awg compiler - 2. list[(str, array)]: waves(id, samples) to upload to the instrument (ordered by index) - 3. dict: command table - """ - seqc = next((s for s in compiled.src if s["filename"] == seqc_filename), None) + def prepare_seqc(self, compiled: CompiledExperiment, seqc_ref: str) -> str: + if seqc_ref is None: + return None + + seqc = next((s for s in compiled.src if s["filename"] == seqc_ref), None) if seqc is None: - raise LabOneQControllerException( - f"SeqC program '{seqc_filename}' not found" - ) + raise LabOneQControllerException(f"SeqC program '{seqc_ref}' not found") # Substitute oscillator nodes by actual assignment seqc_lines = seqc["text"].split("\n") @@ -866,16 +738,13 @@ def prepare_seqc( ] = f"{m.group(1)}{m.group(2)}{m.group(3)}{osc.index}{m.group(4)}" seqc_text = "\n".join(seqc_lines) - bin_waves = self._prepare_waves(compiled, seqc_filename) - command_table = self._prepare_command_table(compiled, seqc_filename) - - return seqc_text, bin_waves, command_table + return seqc_text def prepare_upload_elf(self, elf: bytes, awg_index: int, filename: str): sequencer_paths = self.get_sequencer_paths(awg_index) return DaqNodeSetAction( self._daq, - sequencer_paths["elf"], + sequencer_paths.elf, elf, filename=filename, caching_strategy=CachingStrategy.NO_CACHE, diff --git a/laboneq/controller/devices/zi_emulator.py b/laboneq/controller/devices/zi_emulator.py index 50a87e6..1a4334c 100644 --- a/laboneq/controller/devices/zi_emulator.py +++ b/laboneq/controller/devices/zi_emulator.py @@ -409,17 +409,15 @@ def _node_def(self) -> dict[str, NodeInfo]: return nd -class DevEmuPQSC(DevEmuHW): - def _trig_stop(self): - self._set_val("execution/enable", 0) - - def _trig_execute(self, node: NodeBase): - self._scheduler.enter(delay=0.001, priority=0, action=self._trig_stop) - +class Gen2Base(DevEmuHW): def _ref_clock_switched(self, requested_source: int): + # 0 - INTERNAL + # 1 - EXTERNAL + # 2 - ZSYNC + freq = 10e6 if requested_source == 1 else 100e6 self._set_val("system/clocks/referenceclock/in/sourceactual", requested_source) self._set_val("system/clocks/referenceclock/in/status", 0) - self._set_val("system/clocks/referenceclock/in/freq", 10e6) + self._set_val("system/clocks/referenceclock/in/freq", freq) def _ref_clock(self, node: NodeBase): node_int: NodeInt = node @@ -430,11 +428,8 @@ def _ref_clock(self, node: NodeBase): argument=(node_int.value,), ) - def _node_def(self) -> dict[str, NodeInfo]: + def _node_def_gen2(self) -> dict[str, NodeInfo]: return { - "execution/enable": NodeInfo( - type=NodeType.INT, default=0, handler=DevEmuPQSC._trig_execute - ), "system/clocks/referenceclock/in/source": NodeInfo( type=NodeType.INT, default=0, handler=DevEmuPQSC._ref_clock ), @@ -450,7 +445,23 @@ def _node_def(self) -> dict[str, NodeInfo]: } -class DevEmuSHFQABase(DevEmuHW): +class DevEmuPQSC(Gen2Base): + def _trig_stop(self): + self._set_val("execution/enable", 0) + + def _trig_execute(self, node: NodeBase): + self._scheduler.enter(delay=0.001, priority=0, action=self._trig_stop) + + def _node_def(self) -> dict[str, NodeInfo]: + return { + **self._node_def_gen2(), + "execution/enable": NodeInfo( + type=NodeType.INT, default=0, handler=DevEmuPQSC._trig_execute + ), + } + + +class DevEmuSHFQABase(Gen2Base): def _awg_stop_qa(self, channel: int): 
readout_enable = self._get_node( f"qachannels/{channel}/readout/result/enable" @@ -547,11 +558,12 @@ def _node_def(self) -> dict[str, NodeInfo]: default=self._dev_opts.get("features/options", ""), ), } + nd.update(self._node_def_gen2()) nd.update(self._node_def_qa()) return nd -class DevEmuSHFSGBase(DevEmuHW): +class DevEmuSHFSGBase(Gen2Base): def _awg_stop_sg(self, channel: int): self._set_val(f"sgchannels/{channel}/awg/enable", 0) @@ -583,6 +595,7 @@ def _node_def(self) -> dict[str, NodeInfo]: default=self._dev_opts.get("features/options", ""), ), } + nd.update(self._node_def_gen2()) nd.update(self._node_def_sg()) return nd @@ -599,6 +612,7 @@ def _node_def(self) -> dict[str, NodeInfo]: default=self._dev_opts.get("features/options", "QC6CH"), ), } + nd.update(self._node_def_gen2()) nd.update(self._node_def_qa()) nd.update(self._node_def_sg()) return nd diff --git a/laboneq/controller/near_time_runner.py b/laboneq/controller/near_time_runner.py new file mode 100644 index 0000000..101c20c --- /dev/null +++ b/laboneq/controller/near_time_runner.py @@ -0,0 +1,131 @@ +# Copyright 2019 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import logging +import traceback +from typing import TYPE_CHECKING, Any + +from numpy import typing as npt + +from laboneq.controller.communication import ( + CachingStrategy, + DaqNodeAction, + DaqNodeSetAction, + batch_set, +) +from laboneq.controller.protected_session import ProtectedSession +from laboneq.controller.recipe_enums import NtStepKey +from laboneq.controller.util import LabOneQControllerException, SweepParamsTracker +from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.executor.executor import ExecutorBase, LoopingMode, LoopType + +if TYPE_CHECKING: + from laboneq.controller.controller import Controller + +_logger = logging.getLogger(__name__) + + +class NearTimeRunner(ExecutorBase): + def __init__(self, controller: Controller): + super().__init__(looping_mode=LoopingMode.EXECUTE) + self.controller = controller + self.user_set_nodes = [] + self.nt_loop_indices: list[int] = [] + self.sweep_params_tracker = SweepParamsTracker() + + def nt_step(self) -> NtStepKey: + return NtStepKey(indices=tuple(self.nt_loop_indices)) + + def set_handler(self, path: str, value): + dev = self.controller._devices.find_by_node_path(path) + self.user_set_nodes.append( + DaqNodeSetAction( + dev._daq, path, value, caching_strategy=CachingStrategy.NO_CACHE + ) + ) + + def user_func_handler(self, func_name: str, args: dict[str, Any]): + func = self.controller._user_functions.get(func_name) + if func is None: + raise LabOneQControllerException( + f"User function '{func_name}' is not registered." 
+            )
+        res = func(ProtectedSession(self.controller._session), **args)
+        user_func_results = self.controller._results.user_func_results.setdefault(
+            func_name, []
+        )
+        user_func_results.append(res)
+
+    def set_sw_param_handler(
+        self,
+        name: str,
+        index: int,
+        value: float,
+        axis_name: str,
+        values: npt.ArrayLike,
+    ):
+        self.sweep_params_tracker.set_param(name, value)
+
+    def for_loop_handler(
+        self, count: int, index: int, loop_type: LoopType, enter: bool
+    ):
+        if enter:
+            self.nt_loop_indices.append(index)
+        else:
+            self.nt_loop_indices.pop()
+
+    def rt_handler(
+        self,
+        count: int,
+        uid: str,
+        averaging_mode: AveragingMode,
+        acquisition_type: AcquisitionType,
+        enter: bool,
+    ):
+        if enter:
+            self.controller._initialize_awgs(nt_step=self.nt_step())
+            self.controller._configure_triggers()
+            attribute_value_tracker = (
+                self.controller._recipe_data.attribute_value_tracker
+            )
+            for param in self.sweep_params_tracker.updated_params():
+                attribute_value_tracker.update(
+                    param, self.sweep_params_tracker.get_param(param)
+                )
+            self.sweep_params_tracker.clear_for_next_step()
+
+            nt_sweep_nodes: list[DaqNodeAction] = []
+            for device_uid, device in self.controller._devices.all:
+                nt_sweep_nodes.extend(
+                    device.collect_prepare_nt_step_nodes(
+                        attribute_value_tracker.device_view(device_uid),
+                        self.controller._recipe_data,
+                    )
+                )
+
+            step_prepare_nodes = self.controller._prepare_rt_execution(
+                rt_section_uid=uid
+            )
+
+            batch_set([*self.user_set_nodes, *nt_sweep_nodes, *step_prepare_nodes])
+            self.user_set_nodes.clear()
+
+            for retry in range(3):  # Up to 3 retries
+                if retry > 0:
+                    _logger.info("Step retry %s of 3...", retry + 1)
+                    batch_set(step_prepare_nodes)
+                try:
+                    self.controller._execute_one_step(acquisition_type)
+                    self.controller._read_one_step_results(
+                        nt_step=self.nt_step(), rt_section_uid=uid
+                    )
+                    break
+                except LabOneQControllerException:  # TODO(2K): introduce "hard" controller exceptions
+                    self.controller._report_step_error(
+                        nt_step=self.nt_step(),
+                        rt_section_uid=uid,
+                        message=traceback.format_exc(),
+                    )
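For context: `NearTimeRunner.user_func_handler` looks up a near-time user function by name and invokes it with a `ProtectedSession`, appending the return value to `results.user_func_results[func_name]`. A minimal sketch of what such a function might look like; the function name and the `register_user_function` usage are illustrative assumptions, not part of this patch:

# Sketch only: a near-time user function as dispatched by user_func_handler.
def tune_readout(session, amplitude: float):
    # `session` is the ProtectedSession wrapper created by NearTimeRunner.
    return {"readout_amplitude": amplitude}

# session.register_user_function(tune_readout)   # assumed registration API
# Results accumulate in results.user_func_results["tune_readout"].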
diff --git a/laboneq/controller/recipe_1_4_0.py b/laboneq/controller/recipe_1_4_0.py
index ee49cb6..d99b98e 100644
--- a/laboneq/controller/recipe_1_4_0.py
+++ b/laboneq/controller/recipe_1_4_0.py
@@ -1,19 +1,14 @@
 # Copyright 2019 Zurich Instruments AG
 # SPDX-License-Identifier: Apache-2.0
 
+from __future__ import annotations
+
 from dataclasses import dataclass, field
-from typing import Any, AnyStr, Dict, List, Optional
+from typing import Any
 
 from marshmallow import EXCLUDE, Schema, fields, post_load
 
-from .recipe_enums import (
-    DIOConfigType,
-    ExecutionType,
-    OperationType,
-    RefClkType,
-    ReferenceClockSource,
-    SignalType,
-)
+from .recipe_enums import NtStepKey, RefClkType, SignalType, TriggeringMode
 from .util import LabOneQControllerException
 
 
@@ -30,8 +25,8 @@ class Meta:
 
     @dataclass
     class Data:
-        server_uid: AnyStr
-        host: AnyStr
+        server_uid: str
+        host: str
         port: int
         api_level: int
 
@@ -48,8 +43,8 @@ class Meta:
 
     @dataclass
    class Data:
-        key: AnyStr
-        value: AnyStr
+        key: str
+        value: str
 
     key = fields.Str(required=True)
     value = fields.Str(required=True)
@@ -63,9 +58,9 @@ class Meta:
 
     @dataclass
     class Data:
-        device_uid: AnyStr
-        driver: AnyStr
-        options: Optional[List[DriverOption]] = None
+        device_uid: str
+        driver: str
+        options: list[DriverOption] | None = None
 
         def _get_option(self, key):
             for option in self.options:
@@ -133,22 +128,22 @@ class Meta:
     @dataclass
     class Data:
         channel: int
-        enable: Optional[bool] = None
-        modulation: Optional[bool] = None
-        oscillator: Optional[int] = None
-        oscillator_frequency: Optional[int] = None
-        offset: Optional[float] = None
-        gains: Optional[Gains] = None
-        range: Optional[float] = None
-        range_unit: Optional[str] = None
-        precompensation: Optional[Dict[str, Dict]] = None
-        lo_frequency: Optional[Any] = None
-        port_mode: Optional[str] = None
-        port_delay: Optional[Any] = None
+        enable: bool | None = None
+        modulation: bool | None = None
+        oscillator: int | None = None
+        oscillator_frequency: int | None = None
+        offset: float | None = None
+        gains: Gains | None = None
+        range: float | None = None
+        range_unit: str | None = None
+        precompensation: dict[str, dict] | None = None
+        lo_frequency: Any | None = None
+        port_mode: str | None = None
+        port_delay: Any | None = None
         scheduler_port_delay: float = 0.0
-        delay_signal: Optional[float] = None
-        marker_mode: Optional[str] = None
-        amplitude: Optional[Any] = None
+        delay_signal: float | None = None
+        marker_mode: str | None = None
+        amplitude: Any | None = None
 
     channel = fields.Integer()
     enable = fields.Boolean(required=False)
@@ -181,7 +176,6 @@ class AWG(QCCSSchema):
     class Meta:
         fields = (
             "awg",
-            "seqc",
             "signal_type",
             "qa_signal_id",
             "command_table_match_offset",
@@ -192,14 +186,12 @@ class Meta:
     @dataclass
     class Data:
         awg: int
-        seqc: str
         signal_type: SignalType = SignalType.SINGLE
-        qa_signal_id: Optional[str] = None
-        command_table_match_offset: Optional[int] = None
-        feedback_register: Optional[int] = None
+        qa_signal_id: str | None = None
+        command_table_match_offset: int | None = None
+        feedback_register: int | None = None
 
     awg = fields.Integer()
-    seqc = fields.Str()
     signal_type = SignalTypeField()
     qa_signal_id = fields.Str(required=False, allow_none=True)
     command_table_match_offset = fields.Integer(required=False, allow_none=True)
@@ -220,115 +212,6 @@ class Data:
         device_uid = fields.Str()
 
 
-class ExecutionTypeField(fields.Field):
-    def _serialize(self, value, attr, obj, **kwargs):
-        return value.name
-
-    def _deserialize(self, value, attr, data, **kwargs):
-        return ExecutionType[value.upper()]
-
-
-class listElement(QCCSSchema):
-    class Meta:
-        # TODO(MG) data_type is deprecated and should be removed
-        fields = ("source", "data_type")
-        ordered = True
-
-    @dataclass
-    class Data:
-        source: AnyStr
-        # TODO(MG) data_type is deprecated and should be removed
-        data_type: AnyStr
-
-    source = fields.Str()
-    # TODO(MG) data_type is deprecated and should be removed
-    data_type = fields.Str()
-
-
-class Location(QCCSSchema):
-    @dataclass
-    class Data:
-        type: AnyStr
-        index: int
-
-    type = fields.Str()
-    index = fields.Integer()
-
-
-class Parameter(QCCSSchema):
-    class Meta:
-        fields = (
-            "device_uid",
-            "location",
-            "parameter_uid",
-            "index",
-            "list",
-            "start",
-            "step",
-        )
-        ordered = True
-
-    @dataclass
-    class Data:
-        device_uid: AnyStr = None
-        location: Optional[Location] = None
-        parameter_uid: Optional[AnyStr] = None
-        index: Optional[int] = None
-        list: Optional[listElement] = None
-        start: Optional[float] = None
-        step: Optional[float] = None
-
-    device_uid = fields.Str()
-    location = fields.Nested(Location)
-    parameter_uid = fields.Str(required=False)
-    index = fields.Integer(required=False)
-    list = fields.Nested(listElement, required=False)
-    start = fields.Float(required=False)
-    step: fields.Float(required=False)
-
-
-class OperationTypeField(fields.Field):
-    def _serialize(self, value, attr, obj, **kwargs):
-        return value.name
-
-    def _deserialize(self, value, attr, data, **kwargs):
-        return
OperationType[value.upper()] - - -class SectionOperation(QCCSSchema): - class Meta: - fields = ("op_type", "operation", "args") - ordered = True - - @dataclass - class Data: - op_type: OperationType = None - operation: str = None - args: Dict[str, Any] = None - - op_type = OperationTypeField(required=True) - operation = fields.Str(required=True) - args = fields.Dict(required=False, allow_none=True) - - -class Execution(QCCSSchema): - class Meta: - fields = ("type", "count", "parameters", "children") - ordered = True - - @dataclass - class Data: - type: ExecutionType - count: int = None - parameters: List[Parameter.Data] = None - children: List[Any] = None - - type = ExecutionTypeField(required=False) - count = fields.Integer(required=False) - parameters = fields.List(fields.Nested(Parameter), required=False) - children = fields.List(fields.Nested(lambda: Execution()), required=False) - - class Measurement(QCCSSchema): class Meta: fields = ("length", "channel") @@ -357,28 +240,12 @@ def _deserialize(self, value, attr, data, **kwargs): ) -class DIOConfigTypeField(fields.Field): - def _serialize(self, value, attr, obj, **kwargs): - return value.name - - def _deserialize(self, value, attr, data, **kwargs): - return DIOConfigType[value.upper()] - - -class ReferenceClockSourceField(fields.Field): - def __init__(self, *args, **kwargs) -> None: - kwargs["allow_none"] = True - super().__init__(*args, **kwargs) - +class TriggeringModeField(fields.Field): def _serialize(self, value, attr, obj, **kwargs): - if value is None: - return None return value.name def _deserialize(self, value, attr, data, **kwargs): - if value is None: - return None - return ReferenceClockSource[value.upper()] + return TriggeringMode[value.upper()] class Config(QCCSSchema): @@ -387,9 +254,8 @@ class Meta: "repetitions", "reference_clock", "holdoff", - "dio_mode", + "triggering_mode", "sampling_rate", - "reference_clock_source", ) ordered = True @@ -398,16 +264,14 @@ class Data: repetitions: int = 1 reference_clock: RefClkType = None holdoff: float = 0 - dio_mode: DIOConfigType = DIOConfigType.HDAWG - sampling_rate: Optional[float] = None - reference_clock_source: Optional[ReferenceClockSource] = None + triggering_mode: TriggeringMode = TriggeringMode.DIO_FOLLOWER + sampling_rate: float | None = None repetitions = fields.Int() reference_clock = RefClkTypeField() holdoff = fields.Float() - dio_mode = DIOConfigTypeField() + triggering_mode = TriggeringModeField() sampling_rate = fields.Float() - reference_clock_source = ReferenceClockSourceField() class Initialization(QCCSSchema): @@ -416,7 +280,6 @@ class Meta: "device_uid", "config", "awgs", - "ports", "outputs", "inputs", "measurements", @@ -426,23 +289,21 @@ class Meta: @dataclass class Data: - device_uid: AnyStr + device_uid: str config: Config.Data - awgs: List[AWG.Data] = None - ports: List[Port.Data] = None - outputs: List[IO.Data] = None - inputs: List[IO.Data] = None - measurements: List[Measurement.Data] = field(default_factory=list) - ppchannels: Dict[int, Any] = None + awgs: list[AWG.Data] = None + outputs: list[IO.Data] = None + inputs: list[IO.Data] = None + measurements: list[Measurement.Data] = field(default_factory=list) + ppchannels: list[dict[str, Any]] | None = None device_uid = fields.Str() config = fields.Nested(Config) awgs = fields.List(fields.Nested(AWG), required=False) - ports = fields.List(fields.Nested(Port), required=False) outputs = fields.List(fields.Nested(IO), required=False) inputs = fields.List(fields.Nested(IO), required=False) measurements 
= fields.List(fields.Nested(Measurement), required=False)
-    ppchannels = fields.Dict(required=False, allow_none=True)
+    ppchannels = fields.List(fields.Raw, required=False, allow_none=True)
 
 
 class OscillatorParam(QCCSSchema):
@@ -475,7 +336,7 @@ class Data:
         signal_id: str
         device_id: str
         awg: int
-        channels: List[int]
+        channels: list[int]
         weights: str = None
         threshold: float = 0.0
 
@@ -503,10 +364,42 @@ class Data:
     acquire_length = fields.Int()
 
 
+class NtStepKeyField(fields.Field):
+    def _deserialize(self, value, attr, data, **kwargs):
+        return NtStepKey(indices=tuple(value["indices"]))
+
+
+class RealtimeExecutionInit(QCCSSchema):
+    class Meta:
+        fields = (
+            "device_id",
+            "awg_id",
+            "seqc_ref",
+            "wave_indices_ref",
+            "nt_step",
+        )
+        ordered = True
+
+    @dataclass
+    class Data:
+        device_id: str
+        awg_id: int
+        seqc_ref: str
+        wave_indices_ref: str
+        nt_step: NtStepKey
+
+    device_id = fields.Str()
+    awg_id = fields.Int()
+    seqc_ref = fields.Str()
+    wave_indices_ref = fields.Str()
+    nt_step = NtStepKeyField()
+
+
 class Experiment(QCCSSchema):
     class Meta:
         fields = (
             "initializations",
+            "realtime_execution_init",
             "oscillator_params",
             "integrator_allocations",
             "acquire_lengths",
@@ -517,16 +410,18 @@ class Meta:
 
     @dataclass
     class Data:
-        initializations: List[Initialization.Data]
-        oscillator_params: List[OscillatorParam.Data] = field(default_factory=list)
-        integrator_allocations: List[IntegratorAllocation.Data] = field(
+        initializations: list[Initialization.Data]
+        realtime_execution_init: list[RealtimeExecutionInit.Data]
+        oscillator_params: list[OscillatorParam.Data] = field(default_factory=list)
+        integrator_allocations: list[IntegratorAllocation.Data] = field(
             default_factory=list
         )
-        acquire_lengths: List[AcquireLength.Data] = field(default_factory=list)
-        simultaneous_acquires: List[Dict[str, str]] = field(default_factory=list)
+        acquire_lengths: list[AcquireLength.Data] = field(default_factory=list)
+        simultaneous_acquires: list[dict[str, str]] = field(default_factory=list)
         total_execution_time: float = None
 
     initializations = fields.List(fields.Nested(Initialization))
+    realtime_execution_init = fields.List(fields.Nested(RealtimeExecutionInit))
     oscillator_params = fields.List(fields.Nested(OscillatorParam), required=False)
     integrator_allocations = fields.List(
         fields.Nested(IntegratorAllocation), required=False
@@ -546,10 +441,10 @@ class Meta:
 
     @dataclass
     class Data:
-        line_endings: AnyStr
+        line_endings: str
         experiment: Experiment.Data
-        servers: Optional[List[Server.Data]] = None
-        devices: Optional[List[Device.Data]] = None
+        servers: list[Server.Data] | None = None
+        devices: list[Device.Data] | None = None
 
     line_endings = fields.Str()
     experiment = fields.Nested(Experiment)
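The schema above replaces the old `DIOConfigType`/`ReferenceClockSourceField` pair with a single `TriggeringModeField` that (de)serializes by enum name. A minimal sketch of the round trip, calling the field's hooks directly for illustration (assumed usage, not part of this patch):

# Sketch: TriggeringModeField maps recipe strings onto TriggeringMode by name.
from laboneq.controller.recipe_1_4_0 import TriggeringModeField
from laboneq.controller.recipe_enums import TriggeringMode

tm_field = TriggeringModeField()
assert tm_field._deserialize("desktop_leader", None, {}) is TriggeringMode.DESKTOP_LEADER
assert tm_field._serialize(TriggeringMode.ZSYNC_FOLLOWER, None, None) == "ZSYNC_FOLLOWER"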
diff --git a/laboneq/controller/recipe_enums.py b/laboneq/controller/recipe_enums.py
index 40087da..f698e83 100644
--- a/laboneq/controller/recipe_enums.py
+++ b/laboneq/controller/recipe_enums.py
@@ -1,6 +1,7 @@
 # Copyright 2019 Zurich Instruments AG
 # SPDX-License-Identifier: Apache-2.0
 
+from dataclasses import dataclass
 from enum import Enum
 
 
@@ -11,29 +12,19 @@ class SignalType(Enum):
     MARKER = "marker"
 
 
-class ExecutionType(Enum):
-    SINGLE = 1
-    SWEEP = 2
-
-
 class RefClkType(Enum):
     _10MHZ = 10
     _100MHZ = 100
 
 
-class DIOConfigType(Enum):
-    ZSYNC_DIO = 1
-    HDAWG = 2
-    HDAWG_LEADER = 3
-    DIO_FOLLOWER_OF_HDAWG_LEADER = 4
-
-
-class OperationType(Enum):
-    ACQUIRE = "acquire"
-    USER_FUNC = "user_func"
-    SET = "set"
+class TriggeringMode(Enum):
+    ZSYNC_FOLLOWER = 1
+    DIO_FOLLOWER = 2
+    DESKTOP_LEADER = 3
+    DESKTOP_DIO_FOLLOWER = 4
+    INTERNAL_FOLLOWER = 5
 
 
-class ReferenceClockSource(Enum):
-    INTERNAL = "internal"
-    EXTERNAL = "external"
+@dataclass(frozen=True)
+class NtStepKey:
+    indices: tuple[int, ...]
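`NtStepKey` is a frozen dataclass, so it is hashable and can key per-step bookkeeping, which is how `NearTimeRunner.nt_step()` packages the near-time loop indices. A small sketch:

# Sketch: frozen dataclasses are hashable, so NtStepKey works as a dict key.
from laboneq.controller.recipe_enums import NtStepKey

partial_results = {}
partial_results[NtStepKey(indices=(0, 2))] = "data for NT step (0, 2)"
assert NtStepKey(indices=(0, 2)) in partial_results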
diff --git a/laboneq/controller/recipe_processor.py b/laboneq/controller/recipe_processor.py
index fb25dca..d1878fe 100644
--- a/laboneq/controller/recipe_processor.py
+++ b/laboneq/controller/recipe_processor.py
@@ -58,16 +58,16 @@ class AwgKey:
 
 @dataclass
 class AwgConfig:
     # QA
-    raw_acquire_length: int = None
-    result_length: int = None
+    raw_acquire_length: int | None = None
+    result_length: int | None = None
     acquire_signals: Set[str] = field(default_factory=set)
-    target_feedback_register: int = None
+    target_feedback_register: int | None = None
     # SG
-    qa_signal_id: str = None
-    command_table_match_offset: int = None
-    source_feedback_register: int = None
-    zsync_bit: int = None
-    feedback_register_bit: int = None
+    qa_signal_id: str | None = None
+    command_table_match_offset: int | None = None
+    source_feedback_register: int | None = None
+    zsync_bit: int | None = None
+    feedback_register_bit: int | None = None
 
 
 AwgConfigs = Dict[AwgKey, AwgConfig]
@@ -523,7 +523,12 @@ def pre_process_compiled(
         iq_settings=_pre_process_iq_settings_hdawg(initialization)
     )
 
-    execution = ExecutionFactoryFromExperiment().make(compiled_experiment.experiment)
+    if hasattr(compiled_experiment, "execution"):
+        execution = compiled_experiment.execution
+    else:
+        execution = ExecutionFactoryFromExperiment().make(
+            compiled_experiment.experiment
+        )
 
     result_shapes, rt_execution_infos = _calculate_result_shapes(execution)
     awg_configs = _calculate_awg_configs(rt_execution_infos, recipe.experiment)
     attribute_value_tracker, oscillator_ids = _pre_process_attributes(
diff --git a/laboneq/controller/results.py b/laboneq/controller/results.py
index 4303d00..11bbc47 100644
--- a/laboneq/controller/results.py
+++ b/laboneq/controller/results.py
@@ -3,12 +3,13 @@
 
 from __future__ import annotations
 
-from copy import deepcopy
-from typing import TYPE_CHECKING, Any, List, Union
+from typing import TYPE_CHECKING, Any
 
 import numpy as np
 from numpy.typing import ArrayLike
 
+from laboneq.controller.recipe_enums import NtStepKey
+
 if TYPE_CHECKING:
     from laboneq.dsl.result.acquired_result import AcquiredResult
     from laboneq.dsl.result.results import Results
@@ -22,8 +23,8 @@ def make_empty_results() -> Results:
 
 def make_acquired_result(
     data: ArrayLike,
-    axis_name: List[Union[str, List[str]]],
-    axis: List[Union[ArrayLike, List[ArrayLike]]],
+    axis_name: list[str | list[str]],
+    axis: list[ArrayLike | list[ArrayLike]],
 ) -> AcquiredResult:
     from laboneq.dsl.result.acquired_result import AcquiredResult
 
@@ -32,24 +33,24 @@ def make_acquired_result(
 
 def build_partial_result(
     result: AcquiredResult,
-    nt_loop_indices: List[int],
+    nt_step: NtStepKey,
     raw_result: Any,
-    mapping: List[str],
+    mapping: list[str],
     handle: str,
 ):
-    result.last_nt_step = deepcopy(nt_loop_indices)
-    if len(np.shape(result.data)) == len(nt_loop_indices):
+    result.last_nt_step = list(nt_step.indices)
+    if len(np.shape(result.data)) == len(nt_step.indices):
         # No loops in RT, just a single value produced
         for raw_result_idx in range(len(raw_result)):
             if mapping[raw_result_idx % len(mapping)] == handle:
-                if len(nt_loop_indices) == 0:
+                if len(nt_step.indices) == 0:
                     result.data = raw_result[raw_result_idx]
                 else:
-                    result.data[tuple(nt_loop_indices)] = raw_result[raw_result_idx]
+                    result.data[nt_step.indices] = raw_result[raw_result_idx]
                 break
     else:
         inner_res = result.data
-        for index in nt_loop_indices:
+        for index in nt_step.indices:
            inner_res = inner_res[index]
         res_flat = np.ravel(inner_res)
         res_flat_idx = 0
diff --git a/laboneq/core/types/compiled_experiment.py b/laboneq/core/types/compiled_experiment.py
index cf462a0..03427b6 100644
--- a/laboneq/core/types/compiled_experiment.py
+++ b/laboneq/core/types/compiled_experiment.py
@@ -4,7 +4,7 @@
 from __future__ import annotations
 
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any
 
 from laboneq.core.types.enums.mixer_type import MixerType
 from laboneq.core.validators import dicts_equal
@@ -27,11 +27,11 @@ class PulseInstance:
     modulation_phase: float = None
     channel: int = None  # The AWG channel for rf_signals
     needs_conjugate: bool = False  # SHF devices need that for now
-    play_pulse_parameters: Optional[Dict[str, Any]] = field(default_factory=dict)
-    pulse_pulse_parameters: Optional[Dict[str, Any]] = field(default_factory=dict)
+    play_pulse_parameters: dict[str, Any] = field(default_factory=dict)
+    pulse_pulse_parameters: dict[str, Any] = field(default_factory=dict)
 
     # uid of pulses that this instance overlaps with
-    overlaps: List[str] = None
+    overlaps: list[str] = None
     has_marker1: bool = False
     has_marker2: bool = False
     can_compress: bool = False
@@ -45,8 +45,8 @@ class PulseWaveformMap:
     length_samples: int
     signal_type: str
     # UHFQA's HW modulation is not an IQ mixer. None for flux pulses etc.
-    mixer_type: Optional[MixerType] = field(default=None)
-    instances: List[PulseInstance] = field(default_factory=list)
+    mixer_type: MixerType | None = None
+    instances: list[PulseInstance] = field(default_factory=list)
 
 
 @dataclass
@@ -55,7 +55,7 @@ class PulseMapEntry:
     # key: waveform signature string
     #: A mapping of signals to :py:class:`PulseWaveformMap`
-    waveforms: Dict[str, PulseWaveformMap] = field(default_factory=dict)
+    waveforms: dict[str, PulseWaveformMap] = field(default_factory=dict)
 
 
 @dataclass(init=True, repr=True, order=True)
@@ -69,31 +69,31 @@ class CompiledExperiment:
     experiment: Experiment = field(default=None)
 
     #: Instructions to the controller for running the experiment.
-    recipe: Dict[str, Any] = field(default=None)
+    recipe: dict[str, Any] = field(default=None)
 
     #: The seqC source code, per device.
-    src: List[Dict[str, str]] = field(default=None)
+    src: list[dict[str, str]] = field(default=None)
 
     #: The waveforms that will be uploaded to the devices.
-    waves: List[Dict[str, Any]] = field(default=None)
+    waves: list[dict[str, Any]] = field(default=None)
 
     #: Data structure for storing the indices or filenames by which the waveforms are
     #: referred to during and after upload.
-    wave_indices: List[Dict[str, Any]] = field(default=None)
+    wave_indices: list[dict[str, Any]] = field(default=None)
 
-    #: Datastructure for storing the command table data
-    command_tables: List[Dict[str, Any]] = field(default_factory=list)
+    #: Data structure for storing the command table data
+    command_tables: list[dict[str, Any]] = field(default_factory=list)
 
-    #: List of events as scheduled by the compiler.
-    schedule: Dict[str, Any] = field(default=None)
+    #: List of events as scheduled by the compiler.
+    schedule: dict[str, Any] = field(default=None)
 
     #: A representation of the source experiment, using primitive Python datatypes only
     #: (dicts, lists, etc.)
- experiment_dict: Dict[str, Any] = field(default=None) + experiment_dict: dict[str, Any] = field(default=None) #: Data structure for mapping pulses (in the experiment) to waveforms (on the #: device). - pulse_map: Dict[str, PulseMapEntry] = field(default=None) + pulse_map: dict[str, PulseMapEntry] = field(default=None) def __eq__(self, other: CompiledExperiment): if self is other: @@ -113,9 +113,7 @@ def __eq__(self, other: CompiledExperiment): and self.pulse_map == other.pulse_map ) - def replace_pulse( - self, pulse_uid: Union[str, Pulse], pulse_or_array: "Union[ArrayLike, Pulse]" - ): + def replace_pulse(self, pulse_uid: str | Pulse, pulse_or_array: ArrayLike | Pulse): """Permanently replaces specific pulse with the new sample data in the compiled experiment. Previous pulse data is lost. diff --git a/laboneq/core/types/enums/high_pass_compensation_clearing.py b/laboneq/core/types/enums/high_pass_compensation_clearing.py index 8366698..ee3511f 100644 --- a/laboneq/core/types/enums/high_pass_compensation_clearing.py +++ b/laboneq/core/types/enums/high_pass_compensation_clearing.py @@ -5,6 +5,13 @@ class HighPassCompensationClearing(Enum): + """High-pass compensation clearing. + + .. deprecated:: 2.8 + + This has no functionality. + """ + LEVEL = auto() RISE = auto() FALL = auto() diff --git a/laboneq/core/utilities/replace_pulse.py b/laboneq/core/utilities/replace_pulse.py index 316d293..6f1a654 100644 --- a/laboneq/core/utilities/replace_pulse.py +++ b/laboneq/core/utilities/replace_pulse.py @@ -7,7 +7,7 @@ from copy import deepcopy from dataclasses import dataclass from enum import Enum, auto -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING import numpy as np from numpy.typing import ArrayLike @@ -40,11 +40,11 @@ class Component(Enum): def _replace_pulse_in_wave( compiled_experiment: CompiledExperiment, wave_name: str, - pulse_or_array: Union[ArrayLike, Pulse], + pulse_or_array: ArrayLike | Pulse, pwm: PulseWaveformMap, component: Component = Component.COMPLEX, is_complex: bool = True, - current_waves: Optional[List] = None, + current_waves: list | None = None, ): current_wave = None if current_waves is not None: @@ -152,15 +152,15 @@ class WaveReplacement: awg_id: str sig_string: str replacement_type: ReplacementType - samples: List[ArrayLike] + samples: list[ArrayLike] def calc_wave_replacements( compiled_experiment: CompiledExperiment, - pulse_uid: Union[str, Pulse], - pulse_or_array: Union[ArrayLike, Pulse], - current_waves: Optional[List] = None, -) -> List[WaveReplacement]: + pulse_uid: str | Pulse, + pulse_or_array: ArrayLike | Pulse, + current_waves: list | None = None, +) -> list[WaveReplacement]: if not isinstance(pulse_uid, str): pulse_uid = pulse_uid.uid pm = compiled_experiment.pulse_map.get(pulse_uid) @@ -168,10 +168,10 @@ def calc_wave_replacements( _logger.warning("No mapping found for pulse '%s' - ignoring", pulse_uid) return [] - replacements: List[WaveReplacement] = [] + replacements: list[WaveReplacement] = [] for sig_string, pwm in pm.waveforms.items(): for awgs in compiled_experiment.wave_indices: - awg_wave_map: Dict[str, List[Union[int, str]]] = awgs["value"] + awg_wave_map: dict[str, list[int | str]] = awgs["value"] target_wave = awg_wave_map.get(sig_string) if target_wave is None: continue @@ -222,16 +222,21 @@ def calc_wave_replacements( current_waves=current_waves, ) replacements.append( - WaveReplacement(awgs["filename"], sig_string, replacement_type, samples) + WaveReplacement( + awg_id=awgs["filename"], + 
sig_string=sig_string, + replacement_type=replacement_type, + samples=samples, + ) ) return replacements def replace_pulse( - target: Union[CompiledExperiment, Session], - pulse_uid: Union[str, Pulse], - pulse_or_array: Union[ArrayLike, Pulse], + target: CompiledExperiment | Session, + pulse_uid: str | Pulse, + pulse_or_array: ArrayLike | Pulse, ): """Replaces specific pulse with the new sample data. diff --git a/laboneq/data/__init__.py b/laboneq/data/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/data/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/data/calibration/__init__.py b/laboneq/data/calibration/__init__.py index fa169f3..4a23955 100644 --- a/laboneq/data/calibration/__init__.py +++ b/laboneq/data/calibration/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 @@ -32,21 +32,28 @@ def __repr__(self): return f"{self.__class__.__name__}.{self.name}" -class HighPassCompensationClearing(Enum): - BOTH = auto() - FALL = auto() - LEVEL = auto() - RISE = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - # # Data Classes # +@dataclass +class Calibration: + calibration_items: Dict = field(default_factory=dict) + + +@dataclass +class MixerCalibration: + uid: str = None + voltage_offsets: Optional[List[float]] = None + correction_matrix: Optional[List[List[float]]] = None + + +@dataclass +class Signal: + uid: str = None + + @dataclass class BounceCompensation: delay: float = None @@ -67,19 +74,6 @@ class FIRCompensation: @dataclass class HighPassCompensation: timeconstant: float = None - clearing: HighPassCompensationClearing = None - - -@dataclass -class Calibration: - calibration_items: Dict = field(default_factory=dict) - - -@dataclass -class MixerCalibration: - uid: str = None - voltage_offsets: Optional[List[float]] = None - correction_matrix: Optional[List[List[float]]] = None @dataclass @@ -97,8 +91,3 @@ class Precompensation: high_pass: Optional[HighPassCompensation] = None bounce: Optional[BounceCompensation] = None FIR: Optional[FIRCompensation] = None - - -@dataclass -class Signal: - uid: str = None diff --git a/laboneq/data/compilation_job/__init__.py b/laboneq/data/compilation_job/__init__.py index 89008f3..27ad53b 100644 --- a/laboneq/data/compilation_job/__init__.py +++ b/laboneq/data/compilation_job/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 @@ -25,6 +25,14 @@ def __repr__(self): return f"{self.__class__.__name__}.{self.name}" +class ReferenceClockSourceInfo(Enum): + INTERNAL = auto() + EXTERNAL = auto() + + def __repr__(self): + return f"{self.__class__.__name__}.{self.name}" + + class SectionInfoAlignment(Enum): LEFT = auto() RIGHT = auto() @@ -51,6 +59,7 @@ def __repr__(self): class DeviceInfo: uid: str = None device_type: DeviceInfoType = None + reference_clock_source: ReferenceClockSourceInfo = None @dataclass diff --git a/laboneq/data/execution_payload/__init__.py b/laboneq/data/execution_payload/__init__.py index 6a9fc9a..397cc79 100644 --- a/laboneq/data/execution_payload/__init__.py +++ b/laboneq/data/execution_payload/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 @@ -79,9 +79,8 @@ class TargetServer: @dataclass class 
InitializationConfiguration: - reference_clock_source: str = None reference_clock: float = None - dio_mode: str = None + triggering_mode: str = None @dataclass @@ -91,6 +90,11 @@ class JobParameter: axis_name: str = None +@dataclass +class NtStepKey: + indices: List[int] = field(default_factory=list) + + @dataclass class SourceCode: uid: str = None @@ -124,6 +128,15 @@ class NearTimeOperation: parameters: List[JobParameter] = field(default_factory=list) +@dataclass +class RealTimeExecutionInit: + device: TargetDevice = None + awg_id: int = None + seqc: SourceCode = None + wave_indices_ref: str = None + nt_step: NtStepKey = None + + @dataclass class WaveForm: uid: str = None @@ -142,6 +155,7 @@ class NearTimeProgram: class Recipe: uid: str = None initializations: List[Initialization] = field(default_factory=list) + realtime_execution_init: List[RealTimeExecutionInit] = field(default_factory=list) waveforms: List[WaveForm] = field(default_factory=list) measurement_map: Dict[str, str] = field(default_factory=dict) diff --git a/laboneq/data/execution_payload/execution_payload_helper.py b/laboneq/data/execution_payload/execution_payload_helper.py new file mode 100644 index 0000000..8c3002f --- /dev/null +++ b/laboneq/data/execution_payload/execution_payload_helper.py @@ -0,0 +1,23 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from laboneq.core.serialization.simple_serialization import serialize_to_dict +from laboneq.data.execution_payload import NearTimeProgram + + +class ExecutionPayloadHelper: + @staticmethod + def dump_near_time_program(near_time_program: NearTimeProgram): + return serialize_to_dict(near_time_program) + + @staticmethod + def descend(current_node, visitor, context, parent): + for c in current_node.children: + ExecutionPayloadHelper.descend(c, visitor, context, current_node) + visitor(current_node, context, parent) + + @staticmethod + def accept_near_time_program_visitor( + near_time_program: NearTimeProgram, visitor, context=None + ): + ExecutionPayloadHelper.descend(near_time_program, visitor, context, None) diff --git a/laboneq/data/experiment_description/__init__.py b/laboneq/data/experiment_description/__init__.py index f9e1d37..bbc678f 100644 --- a/laboneq/data/experiment_description/__init__.py +++ b/laboneq/data/experiment_description/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 @@ -65,6 +65,22 @@ def __repr__(self): # +@dataclass +class SignalCalibration: + uid: str = None + oscillator: Optional[Any] = None + local_oscillator: Optional[Any] = None + mixer_calibration: Optional[Any] = None + precompensation: Optional[Any] = None + port_delay: Optional[Any] = None + port_mode: Optional[Any] = None + delay_signal: Optional[Any] = None + voltage_offset: Optional[Any] = None + range: Any = None + threshold: Optional[Any] = None + amplitude: Optional[Any] = None + + @dataclass class Operation: uid: str = None @@ -149,7 +165,7 @@ class Delay(Operation): @dataclass class Experiment: uid: str = None - signals: List[ExperimentSignal] = field(default_factory=list) + signals: Union[Dict[str, ExperimentSignal], List[ExperimentSignal]] = None epsilon: float = None sections: List[Section] = field(default_factory=list) pulses: List[Pulse] = field(default_factory=list) @@ -219,22 +235,6 @@ class Set(Operation): value: Any = None -@dataclass -class SignalCalibration: - uid: str = None - oscillator: Optional[Any] = None - local_oscillator: 
Optional[Any] = None - mixer_calibration: Optional[Any] = None - precompensation: Optional[Any] = None - port_delay: Optional[Any] = None - port_mode: Optional[Any] = None - delay_signal: Optional[Any] = None - voltage_offset: Optional[Any] = None - range: Any = None - threshold: Optional[Any] = None - amplitude: Optional[Any] = None - - @dataclass class Sweep(Section): uid: str = None diff --git a/laboneq/data/experiment_results/__init__.py b/laboneq/data/experiment_results/__init__.py index 8764797..5d94083 100644 --- a/laboneq/data/experiment_results/__init__.py +++ b/laboneq/data/experiment_results/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/data/experiment_schedule/__init__.py b/laboneq/data/experiment_schedule/__init__.py new file mode 100644 index 0000000..03b1509 --- /dev/null +++ b/laboneq/data/experiment_schedule/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +# __init__.py of 'experiment_schedule' package - autogenerated, do not edit +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import Dict, List + +# +# Enums +# + +# +# Data Classes +# + + +@dataclass +class Event: + id: int = None + time: float = None + event_type: str = None + event_data: Dict = field(default_factory=dict) + + +@dataclass +class SectionStructure: + structure: Dict = field(default_factory=dict) + + +@dataclass +class ExperimentSchedule: + uid: str = None + events: List[Event] = field(default_factory=list) + section_structure: SectionStructure = None + experiment_hash: str = None + compiled_experiment_hash: str = None diff --git a/laboneq/data/scheduled_experiment/__init__.py b/laboneq/data/scheduled_experiment/__init__.py index d9e7035..1129e37 100644 --- a/laboneq/data/scheduled_experiment/__init__.py +++ b/laboneq/data/scheduled_experiment/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/data/setup_description/__init__.py b/laboneq/data/setup_description/__init__.py index e5a62ed..5aea6b6 100644 --- a/laboneq/data/setup_description/__init__.py +++ b/laboneq/data/setup_description/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/dsl/calibration/precompensation.py b/laboneq/dsl/calibration/precompensation.py index 3844c84..3664a29 100644 --- a/laboneq/dsl/calibration/precompensation.py +++ b/laboneq/dsl/calibration/precompensation.py @@ -3,6 +3,7 @@ from __future__ import annotations +import warnings from dataclasses import dataclass, field from typing import List, Optional @@ -34,12 +35,27 @@ class ExponentialCompensation(Observable): @dataclass class HighPassCompensation(Observable): - """Data object containing highpass filter parameters for the signal precompensation""" + """Data object containing highpass filter parameters for the signal precompensation. + + .. versionchanged:: 2.8 + + Deprecated `clearing` argument: It has no functionality. + """ #: high-pass filter time constant timeconstant: float = 1e-6 - #: choose the clearing mode of the high-pass filter - clearing: HighPassCompensationClearing = HighPassCompensationClearing.RISE + #: Deprecated. 
Choose the clearing mode of the high-pass filter
+    clearing: HighPassCompensationClearing = field(default=None)
+
+    def __post_init__(self):
+        if self.clearing is not None:
+            warnings.warn(
+                "`HighPassCompensation` argument `clearing` will be removed in future versions. It has no functionality.",
+                FutureWarning,
+            )
+        else:
+            self.clearing = HighPassCompensationClearing.RISE
+        super().__post_init__()
 
 
 @dataclass
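With this change, constructing `HighPassCompensation` with the deprecated `clearing` argument emits a `FutureWarning`, while omitting it falls back to `HighPassCompensationClearing.RISE` as before. A quick sketch of the new behavior (import paths taken from this patch; illustrative, not part of the diff):

import warnings

from laboneq.core.types.enums.high_pass_compensation_clearing import (
    HighPassCompensationClearing,
)
from laboneq.dsl.calibration.precompensation import HighPassCompensation

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    HighPassCompensation(timeconstant=2e-6, clearing=HighPassCompensationClearing.RISE)
assert any(issubclass(w.category, FutureWarning) for w in caught)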
- """ - self.signals.update(self._parse_signals(logical_signal_group)) - - def set_parameters(self, parameters: Dict[str, Any]): - """ - Sets the parameters for the quantum element. - Allowed datatypes for the parameters are the following: Integer, Boolean, Float, Complex numbers, - Numpy arrays of the above, Strings, Dictionaries of the above, LabOne Q types and None. - - Args: - parameters: A dictionary of parameters to set for the quantum element. - """ - self._parameters.update(parameters) - - def save(self, filename: Union[str, bytes, os.PathLike]): - """ - Save a QuantumElement object to a JSON file. - - Args: - filename: The name of the JSON file to save the QuantumElement object. - """ - self.to_json(filename) - - def to_json(self, filename: Union[str, bytes, os.PathLike]): - """ - Save a QuantumElement object to a JSON file. - - Args: - filename: The name of the JSON file to save the QuantumElement object. - """ - Serializer.to_json_file(self, filename) - - -@dataclass(init=False, repr=True, eq=False) -class Qubit(QuantumElement): - """A class for generic qubits.""" - - ... diff --git a/laboneq/dsl/experiment/experiment.py b/laboneq/dsl/experiment/experiment.py index 09a2493..2d1f70b 100644 --- a/laboneq/dsl/experiment/experiment.py +++ b/laboneq/dsl/experiment/experiment.py @@ -37,6 +37,16 @@ def experiment_id_generator(): @dataclass(init=True, repr=True, order=True) class Experiment: + """LabOne Q Experiment. + + Args: + uid: UID of the experiment. + signals: Experiment signals. + version: Used DSL version. + epsilon: Epsilon. Not used. + sections: Sections in the experiment. + """ + uid: str = field(default_factory=experiment_id_generator) signals: Union[Dict[str, ExperimentSignal], List[ExperimentSignal]] = field( default_factory=dict @@ -45,7 +55,7 @@ class Experiment: epsilon: float = field(default=0.0) sections: List[Section] = field(default_factory=list) _section_stack: Deque[Section] = field( - default_factory=deque, repr=False, compare=False + default_factory=deque, repr=False, compare=False, init=False ) def __post_init__(self): diff --git a/laboneq/dsl/laboneq_facade.py b/laboneq/dsl/laboneq_facade.py index ba47f21..55288c1 100644 --- a/laboneq/dsl/laboneq_facade.py +++ b/laboneq/dsl/laboneq_facade.py @@ -9,7 +9,7 @@ from numpy import typing as npt from laboneq import controller as ctrl -from laboneq.compiler import Compiler +from laboneq.compiler.workflow.compiler import Compiler from laboneq.core.types import CompiledExperiment if TYPE_CHECKING: diff --git a/laboneq/dsl/quantum/__init__.py b/laboneq/dsl/quantum/__init__.py new file mode 100644 index 0000000..0e9fd4f --- /dev/null +++ b/laboneq/dsl/quantum/__init__.py @@ -0,0 +1,5 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from laboneq.dsl.quantum.quantum_operations import QuantumOperation +from laboneq.dsl.quantum.qubits import QuantumElement, Qubit, QubitParameters diff --git a/laboneq/dsl/device/quantum_operations.py b/laboneq/dsl/quantum/quantum_operations.py similarity index 98% rename from laboneq/dsl/device/quantum_operations.py rename to laboneq/dsl/quantum/quantum_operations.py index 7dce430..80c2f7c 100644 --- a/laboneq/dsl/device/quantum_operations.py +++ b/laboneq/dsl/quantum/quantum_operations.py @@ -6,8 +6,8 @@ from os import PathLike from typing import Dict, Optional, Tuple, Union -from laboneq.dsl.device.qubits import QuantumElement from laboneq.dsl.experiment.section import Section +from laboneq.dsl.quantum.qubits import QuantumElement from 
laboneq.dsl.serialization import Serializer QuantumElementTuple = Tuple[QuantumElement, ...] diff --git a/laboneq/dsl/quantum/qubits.py b/laboneq/dsl/quantum/qubits.py new file mode 100644 index 0000000..2e05c21 --- /dev/null +++ b/laboneq/dsl/quantum/qubits.py @@ -0,0 +1,329 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import os +import uuid +from abc import ABC, abstractmethod +from collections.abc import MutableMapping +from dataclasses import dataclass, field +from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from laboneq.core.exceptions import LabOneQException +from laboneq.dsl.calibration import Calibration, Oscillator, SignalCalibration +from laboneq.dsl.device.io_units import LogicalSignal +from laboneq.dsl.experiment import ExperimentSignal +from laboneq.dsl.serialization import Serializer + + +class SignalType(Enum): + DRIVE = "drive" + MEASURE = "measure" + ACQUIRE = "acquire" + FLUX = "flux" + + +class QuantumElementSignalMap(MutableMapping): + def __init__( + self, items: Dict[str, str], key_validator: Callable[[str], None] = None + ) -> None: + """A mapping between signal. + + Args: + items: Mapping between the signal names. + key_validator: Callable to validate mapping keys. + """ + self._items = {} + self._key_validator = key_validator + if self._key_validator: + for k, v in items.items(): + self._items[self._key_validator(k)] = v + else: + self._items = items + + def __getitem__(self, key: Any): + return self._items[key] + + def __setitem__(self, key: Any, value: Any): + if self._key_validator: + self._key_validator(key) + self._items[key] = value + + def __delitem__(self, key: Any): + del self._items[key] + + def __iter__(self): + return iter(self._items) + + def __len__(self): + return len(self._items) + + def __eq__(self, __o: object) -> bool: + if isinstance(__o, dict): + return __o == self._items + return super().__eq__(__o) + + def __repr__(self): + return repr(self._items) + + +@dataclass(init=False, repr=True) +class QuantumElement(ABC): + """An abstract base class for quantum elements.""" + + uid: str + signals: Dict[str, str] + parameters: Dict[str, Any] + + def __init__( + self, + uid: str = None, + signals: Dict[str, LogicalSignal] = None, + parameters: Dict[str, Any] = None, + ): + """ + Initializes a new QuantumElement object. + + Args: + uid: A unique identifier for the quantum element. + signals: A dictionary of logical signals associated with the quantum element. + parameters: A dictionary of parameters associated with the quantum element. + """ + self.uid = uuid.uuid4().hex if uid is None else uid + if signals is None: + signals = QuantumElementSignalMap({}) + if isinstance(signals, dict): + sigs = { + k: self._resolve_to_logical_signal_uid(v) for k, v in signals.items() + } + self.signals = QuantumElementSignalMap(sigs) + else: + self.signals = signals + self._parameters = {} if parameters is None else parameters + + def __hash__(self): + return hash(self.uid) + + @staticmethod + def _resolve_to_logical_signal_uid(signal: Union[str, LogicalSignal]) -> str: + return signal.path if isinstance(signal, LogicalSignal) else signal + + @property + def parameters(self): + """Parameters of the element.""" + return self._parameters + + @classmethod + def load(cls, filename: Union[str, bytes, os.PathLike]) -> "QuantumElement": + """ + Loads a QuantumElement object from a JSON file. + + Args: + filename: The name of the JSON file to load the QuantumElement object from. 
+ """ + return cls.from_json(filename) + + @classmethod + def from_json(cls, filename: Union[str, bytes, os.PathLike]) -> "QuantumElement": + """Loads a QuantumElement object from a JSON file. + + Args: + filename: The name of the JSON file to load the QuantumElement object from. + """ + return Serializer.from_json_file(filename, cls) + + def save(self, filename: Union[str, bytes, os.PathLike]): + """ + Save a QuantumElement object to a JSON file. + + Args: + filename: The name of the JSON file to save the QuantumElement object. + """ + self.to_json(filename) + + def to_json(self, filename: Union[str, bytes, os.PathLike]): + """ + Save a QuantumElement object to a JSON file. + + Args: + filename: The name of the JSON file to save the QuantumElement object. + """ + Serializer.to_json_file(self, filename) + + def add_signals(self, signals: Dict[str, LogicalSignal]): + """ + Adds logical signals to the quantum element. + + Args: + signals: A dictionary of logical signals to add to the quantum element. + """ + self.signals.update( + {k: self._resolve_to_logical_signal_uid(v) for (k, v) in signals.items()} + ) + + @abstractmethod + def calibration(self) -> Calibration: + """Calibration of the Quantum element.""" + pass + + def experiment_signals(self) -> List[ExperimentSignal]: + """Experiment signals of the quantum element.""" + sigs = [] + for k, v in self.calibration().items(): + sig = ExperimentSignal( + uid=k, + calibration=v, + map_to=k, + ) + sigs.append(sig) + return sigs + + +@dataclass +class QubitParameters: + #: Resonance frequency of the qubit. + res_frequency: float + #: Local oscillator frequency. + lo_frequency: float + #: Readout resonance frequency of the qubit. + readout_res_frequency: float + #: Readout local oscillator frequency. + readout_lo_frequency: float + #: Free form dictionary of user defined parameters. + user_defs: Dict = field(default_factory=dict) + + @property + def drive_frequency(self) -> float: + """Qubit drive frequency.""" + return self.res_frequency - self.lo_frequency + + @property + def readout_frequency(self) -> float: + """Readout baseband frequency.""" + return self.readout_res_frequency - self.readout_lo_frequency + + +@dataclass(init=False, repr=True, eq=False) +class Qubit(QuantumElement): + """A class for a generic Qubit.""" + + def __init__( + self, + uid: str = None, + signals: Dict[str, LogicalSignal] = None, + parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, + ): + """ + Initializes a new Qubit. + + Args: + uid: A unique identifier for the Qubit. + signals: A mapping of logical signals associated with the qubit. + Qubit accepts the following keys in the mapping: 'drive', 'measure', 'acquire', 'flux' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. 
+ """ + if isinstance(parameters, dict): + parameters = QubitParameters(**parameters) + if signals is None: + signals = QuantumElementSignalMap( + {}, key_validator=self._validate_signal_type + ) + if isinstance(signals, dict): + signals = QuantumElementSignalMap( + signals, key_validator=self._validate_signal_type + ) + super().__init__(uid, signals, parameters) + + @staticmethod + def _validate_signal_type(name: str) -> str: + try: + SignalType(name) + return name + except ValueError: + raise LabOneQException( + f"Signal {name} is not one of {[enum.value for enum in SignalType]}" + ) + + @classmethod + def from_logical_signal_group( + cls, + uid: str, + lsg, + parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, + ) -> "Qubit": + """Qubit from logical signal group. + + Args: + uid: A unique identifier for the Qubit. + lsg: Logical signal group. + Qubit understands the following signal line names: + + - drive: 'drive', 'drive_line' + - measure: 'measure', 'measure_line' + - acquire: 'acquire', 'acquire_line' + - flux: 'flux', 'flux_line' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. + """ + signal_map = {} + for name, sig in lsg.logical_signals.items(): + sig_type = name + if name in ["drive", "drive_line"]: + sig_type = SignalType.DRIVE.value + if name in ["measure", "measure_line"]: + sig_type = SignalType.MEASURE.value + if name in ["acquire", "acquire_line"]: + sig_type = SignalType.ACQUIRE.value + if name in ["flux", "flux_line"]: + sig_type = SignalType.FLUX.value + signal_map[sig_type] = cls._resolve_to_logical_signal_uid(sig) + return cls( + uid=uid, signals=QuantumElementSignalMap(signal_map), parameters=parameters + ) + + def calibration(self) -> Calibration: + """Generate calibration from parameters.""" + calibs = {} + calibs[self.signals["drive"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_drive_osc", frequency=self.parameters.drive_frequency + ) + ) + calibs[self.signals["measure"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_measure_osc", + frequency=self.parameters.readout_frequency, + ) + ) + calibs[self.signals["acquire"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_acquire_osc", + frequency=self.parameters.readout_frequency, + ) + ) + return Calibration(calibs) + + def experiment_signals( + self, with_types=False + ) -> Union[List[ExperimentSignal], List[Tuple[SignalType, ExperimentSignal]]]: + """Experiment signals of the quantum element. + + Args: + with_types: Return a list of tuples which consist of an mapped logical signal type and an experiment signal. 
+ """ + exp_signals = super().experiment_signals() + if with_types: + sigs = [] + for exp_sig in exp_signals: + for role, signal in self.signals.items(): + if signal == exp_sig.mapped_logical_signal_path: + role = SignalType(role) + sigs.append((role, exp_sig)) + break + return sigs + return exp_signals diff --git a/laboneq/dsl/result/results.py b/laboneq/dsl/result/results.py index 7174d58..00ee07c 100644 --- a/laboneq/dsl/result/results.py +++ b/laboneq/dsl/result/results.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any from laboneq.core.exceptions import LabOneQException @@ -32,10 +32,10 @@ class Results: compiled_experiment: CompiledExperiment = field(default=None) #: The acquired results, organized by handle. - acquired_results: Dict[str, AcquiredResult] = field(default=None) + acquired_results: dict[str, AcquiredResult] = field(default=None) #: List of the results of each user user function, by name of the function. - user_func_results: Dict[str, List[Any]] = field(default=None) + user_func_results: dict[str, list[Any]] = field(default=None) #: Any exceptions that occurred during the execution of the experiment. Entries are #: tuples of @@ -43,7 +43,7 @@ class Results: #: * the indices of the loops where the error occurred, #: * the section uid, #: * the error message. - execution_errors: List[Tuple[List[int], str, str]] = field(default=None) + execution_errors: list[tuple[list[int], str, str]] = field(default=None) def __eq__(self, other): if self is other: @@ -99,7 +99,7 @@ def get_data(self, handle: str) -> ArrayLike: self._check_handle(handle) return self.acquired_results[handle].data - def get_axis_name(self, handle: str) -> List[Union[str, List[str]]]: + def get_axis_name(self, handle: str) -> list[str | list[str]]: """Returns the names of axes. Returns the list of axis names, that correspond to the dimensions of the result returned by @@ -121,7 +121,7 @@ def get_axis_name(self, handle: str) -> List[Union[str, List[str]]]: self._check_handle(handle) return self.acquired_results[handle].axis_name - def get_axis(self, handle: str) -> List[Union[ArrayLike, List[ArrayLike]]]: + def get_axis(self, handle: str) -> list[ArrayLike | list[ArrayLike]]: """Returns the axes grids. Returns the list, where each element represents an axis of the corresponding dimension of @@ -144,7 +144,7 @@ def get_axis(self, handle: str) -> List[Union[ArrayLike, List[ArrayLike]]]: self._check_handle(handle) return self.acquired_results[handle].axis - def get_last_nt_step(self, handle: str) -> List[int]: + def get_last_nt_step(self, handle: str) -> list[int]: """Returns the list of axis indices of the last measured near-time point. Returns the list of axis indices that represent the last measured near-time point. Use this @@ -167,7 +167,7 @@ def get_last_nt_step(self, handle: str) -> List[int]: return self.acquired_results[handle].last_nt_step @property - def device_calibration(self) -> Optional[Calibration]: + def device_calibration(self) -> Calibration | None: """Get the device setup's calibration. 
See Also: diff --git a/laboneq/dsl/serialization/serializer.py b/laboneq/dsl/serialization/serializer.py index e5509d7..9206566 100644 --- a/laboneq/dsl/serialization/serializer.py +++ b/laboneq/dsl/serialization/serializer.py @@ -102,8 +102,8 @@ def _classes_by_short_name(): "laboneq.dsl.parameter", "laboneq.dsl.calibration", "laboneq.dsl.device", - "laboneq.dsl.device.qubits", - "laboneq.dsl.device.quantum_operations", + "laboneq.dsl.quantum.qubits", + "laboneq.dsl.quantum.quantum_operations", "laboneq.dsl.device.server", "laboneq.dsl.device.servers.data_server", "laboneq.core.types.enums", diff --git a/laboneq/implementation/__init__.py b/laboneq/implementation/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/compilation_service/__init__.py b/laboneq/implementation/compilation_service/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/compilation_service/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/compilation_service/compilation_service.py b/laboneq/implementation/compilation_service/compilation_service.py index 88a3aad..1458705 100644 --- a/laboneq/implementation/compilation_service/compilation_service.py +++ b/laboneq/implementation/compilation_service/compilation_service.py @@ -38,4 +38,8 @@ def compilation_job_result(self, job_id: str) -> ScheduledExperiment: Get the result of a compilation job. Blocks until the result is available. """ - return ScheduledExperiment(recipe={"experiment": {"initializations": []}}) + return ScheduledExperiment( + recipe={ + "experiment": {"initializations": [], "realtime_execution_init": []} + } + ) diff --git a/laboneq/implementation/compilation_service/compilation_service_legacy.py b/laboneq/implementation/compilation_service/compilation_service_legacy.py new file mode 100644 index 0000000..7ccf49a --- /dev/null +++ b/laboneq/implementation/compilation_service/compilation_service_legacy.py @@ -0,0 +1,250 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import copy +import json +import logging +import time +import uuid + +from laboneq.compiler import Compiler +from laboneq.core.types.compiled_experiment import ( + CompiledExperiment as CompiledExperimentDSL, +) +from laboneq.data.compilation_job import CompilationJob, SignalInfoType +from laboneq.data.scheduled_experiment import ScheduledExperiment +from laboneq.interfaces.compilation_service import CompilationServiceAPI + +_logger = logging.getLogger(__name__) + + +class CompilationServiceLegacy(CompilationServiceAPI): + def __init__(self): + self._job_queue = [] + self._job_results = {} + + def submit_compilation_job(self, job: CompilationJob): + """ + Submit a compilation job. + """ + job_id = len(self._job_queue) + queue_entry = {"job_id": job_id, "job": job} + + experiment_json = convert_to_experiment_json(job) + compiler = Compiler() + compiler_output = compiler.run(experiment_json) + + self._job_results[job_id] = convert_compiler_output_to_scheduled_experiment( + compiler_output + ) + + self._job_queue.append(queue_entry) + return job_id + + def compilation_job_status(self, job_id: str): + """ + Get the status of a compilation job. 
+ """ + return next(j for j in self._job_queue if j["job_id"] == job_id) + + def compilation_job_result(self, job_id: str) -> ScheduledExperiment: + """ + Get the result of a compilation job. Blocks until the result is available. + """ + num_tries = 10 + while True: + result = self._job_results.get(job_id) + if result: + return result + if num_tries == 0: + break + num_tries -= 1 + time.sleep(100e-3) + + +def convert_to_experiment_json(job: CompilationJob): + retval = { + "$schema": "../../schemas/qccs-schema_2_5_0.json", + "metadata": { + "version": "2.5.0", + "unit": {"time": "s", "frequency": "Hz", "phase": "rad"}, + "epsilon": {"time": 1e-12}, + "line_endings": "unix", + }, + "servers": [ + { + "id": "zi_server", + "host": "0.0.0.0", + "port": 8004, + "api_level": 6, + } + ], + } + + devices_in_job = {} + oscillators_in_job = {} + device_oscillators = {} + signal_connections = [] + + for signal in job.experiment_info.signals: + devices_in_job[signal.device.uid] = signal.device + _logger.info(f"Added device {signal.device} to job") + connection_dir = "in" if signal.type == SignalInfoType.INTEGRATION else "out" + signal_connections.append( + { + "signal": {"$ref": signal.uid}, + "device": {"$ref": signal.device.uid}, + "connection": { + "type": connection_dir, + "channels": [int(c) for c in signal.channels], + }, + } + ) + for osc in signal.oscillators: + oscillators_in_job[osc.uid] = osc + if osc.is_hardware: + device_oscillators.setdefault(signal.device.uid, []).append(osc) + + retval["devices"] = [ + { + "id": d.uid, + "server": {"$ref": "zi_server"}, + "serial": "DEV" + str(i), + "interface": "1GbE", + "driver": d.device_type.name.lower(), + } + for i, d in enumerate(devices_in_job.values()) + ] + for d in retval["devices"]: + if d["id"] in device_oscillators: + d["oscillators_list"] = [ + {"$ref": o.uid} for o in device_oscillators[d["id"]] + ] + retval["oscillators"] = [ + {"id": o.uid, "frequency": o.frequency, "hardware": o.is_hardware} + for o in oscillators_in_job.values() + ] + + signal_type_mapping = { + SignalInfoType.INTEGRATION: "integration", + SignalInfoType.RF: "single", + SignalInfoType.IQ: "iq", + } + + retval["signals"] = [ + { + "id": s.uid, + "signal_type": signal_type_mapping[s.type], + "oscillators_list": [{"$ref": o.uid} for o in s.oscillators], + } + for s in job.experiment_info.signals + ] + for s in retval["signals"]: + if s["oscillators_list"] == []: + del s["oscillators_list"] + else: + s["modulation"] = True + + retval["signal_connections"] = signal_connections + + retval["pulses"] = [ + not_none_fields_dict( + p, ["uid", "length", "amplitude", "phase", "function"], {"uid": "id"} + ) + for p in job.experiment_info.pulse_defs + ] + + def walk_sections(section, visitor): + visitor(section) + for s in section.children: + walk_sections(s, visitor) + + sections_flat = [] + + def collector(section): + sections_flat.append(section) + + for s in job.experiment_info.sections: + walk_sections(s, collector) + + retval["sections"] = [] + for s in sections_flat: + out_section = { + "id": s.uid, + "align": s.alignment.name.lower() if s.alignment else "left", + } + if s.count is not None: + out_section["repeat"] = { + "count": s.count, + "sections_list": [{"$ref": c.uid} for c in s.children], + "execution_type": s.execution_type, + "averaging_type": s.averaging_type, + } + else: + out_section["sections_list"] = [{"$ref": c.uid} for c in s.children] + if out_section["sections_list"] == []: + del out_section["sections_list"] + retval["sections"].append(out_section) + + for 
ssp in job.experiment_info.section_signal_pulses: + for s in retval["sections"]: + if s["id"] == ssp.section.uid: + if "signals_list" not in s: + s["signals_list"] = [] + signals_list_entry = next( + ( + sle + for sle in s["signals_list"] + if sle["signal"]["$ref"] == ssp.signal.uid + ), + None, + ) + if signals_list_entry is None: + signals_list_entry = { + "signal": {"$ref": ssp.signal.uid}, + "pulses_list": [], + } + s["signals_list"].append(signals_list_entry) + + signals_list_entry["pulses_list"].append( + {"pulse": {"$ref": ssp.pulse_def.uid}} + ) + break + + retval["experiment"] = { + "sections_list": [{"$ref": s.uid} for s in job.experiment_info.sections], + "signals_list": [{"$ref": s.uid} for s in job.experiment_info.signals], + } + + _logger.info(f"Generated job: {json.dumps(retval, indent=2)}") + + return retval + + +def not_none_fields_dict(obj, fields, translator): + return { + translator.get(f, f): getattr(obj, f) + for f in fields + if getattr(obj, f) is not None + } + + +def convert_compiler_output_to_scheduled_experiment( + compiler_output: CompiledExperimentDSL, +) -> ScheduledExperiment: + recipe = copy.deepcopy(compiler_output.recipe) + src = copy.deepcopy(compiler_output.src) + waves = copy.deepcopy(compiler_output.waves) + wave_indices = copy.deepcopy(compiler_output.wave_indices) + command_tables = copy.deepcopy(compiler_output.command_tables) + pulse_map = copy.deepcopy(compiler_output.pulse_map) + + return ScheduledExperiment( + uid=uuid.uuid4().hex, + recipe=recipe, + src=src, + waves=waves, + wave_indices=wave_indices, + command_tables=command_tables, + pulse_map=pulse_map, + ) diff --git a/laboneq/implementation/data_storage/__init__.py b/laboneq/implementation/data_storage/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/data_storage/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/data_storage/l1q_database_wrapper.py b/laboneq/implementation/data_storage/l1q_database_wrapper.py new file mode 100644 index 0000000..ab3a0ad --- /dev/null +++ b/laboneq/implementation/data_storage/l1q_database_wrapper.py @@ -0,0 +1,22 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from laboneq.implementation.data_storage_service.data_storage_service_sqlite_dict import ( + DataStorageServiceSqliteDict, +) +from laboneq.interfaces.data_storage.data_storage_api import DataStorageAPI + + +class L1QDatabase(DataStorageAPI): + """This is proxy object to access the data DataStorageServiceSqliteDict. It is used to give the user simple access to the data storage api. 
+ This class is included in the simple.py so that the user can access it using 'from laboneq.simple import *' + """ + + def __init__(self, file_path=None): + self._data_storage_service = DataStorageServiceSqliteDict(file_path=file_path) + + def __getattribute__(self, attr): + if hasattr(DataStorageAPI, attr): + return getattr(self._data_storage_service, attr) + else: + return super().__getattribute__(attr) diff --git a/laboneq/implementation/data_storage_service/__init__.py b/laboneq/implementation/data_storage_service/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/data_storage_service/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py b/laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py new file mode 100644 index 0000000..3c3e594 --- /dev/null +++ b/laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py @@ -0,0 +1,170 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import copy +import datetime +from pathlib import Path +from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Union +from uuid import uuid4 + +from laboneq.dsl.serialization.serializer import Serializer +from laboneq.interfaces.data_storage.data_storage_api import DataStorageAPI + + +class DataStorageServiceSqliteDict(DataStorageAPI): + METADATA_TABLE = "metadata" + DATA_TABLE = "data" + + def __init__(self, file_path: Optional[str] = None): + from sqlitedict import SqliteDict + + if file_path is None: + file_path = "laboneq_data/data.db" + self._file_path = Path(file_path) + + # Check if the directory exists + self._file_path.parent.mkdir(parents=True, exist_ok=True) + + self._metadata_db = SqliteDict( + str(self._file_path), tablename=self.METADATA_TABLE, autocommit=True + ) + self._data_db = SqliteDict( + str(self._file_path), tablename=self.DATA_TABLE, autocommit=True + ) + + def get( + self, key: str, with_metadata: bool = False + ) -> Union[Any, Tuple[Any, Dict[str, Any]]]: + metadata = self.get_metadata(key) + data_type = metadata["__type"] + raw_data = self._data_db[key] + deserialized_object = Serializer.load(raw_data, data_type) + if with_metadata: + return deserialized_object, metadata + else: + return deserialized_object + + def get_metadata(self, key: str) -> Dict[str, Any]: + return self._convert_metadata(self._metadata_db[key]) + + def keys(self) -> Iterable[str]: + """Return an iterable of all keys in the database.""" + return self._metadata_db.keys() + + def store( + self, + data: Any, + key: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + ) -> str: + """ + Store data in the database. Only data that can be serialized with the L1Q serializer can be stored. + + Args: + key (str): The key to store the data under. + data (any): The data to store. + metadata (dict): Metadata to store with the data. Metadata can be used to search for data in the database. + Metadata must have strings as keys, and values may be strings or python standard datetime objects. 
+ """ + if key is None: + key = uuid4().hex + self._validate_key(key) + + metadata = copy.deepcopy(metadata) + if metadata is None: + metadata = {} + metadata["__type"] = type(data).__name__ + self._validate_metadata(metadata) + self._metadata_db[key] = metadata + serialized_data = Serializer.to_dict(data) + self._data_db[key] = serialized_data + return key + + def delete(self, key: str) -> bool: + """ + Delete data from the database. + + Args: + key (str): The key of the data to delete. + """ + deleted = False + try: + del self._metadata_db[key] + deleted = True + except KeyError: + pass + try: + del self._data_db[key] + deleted = True + except KeyError: + pass + + return deleted + + def find( + self, + metadata: Optional[Dict[str, Any]] = None, + condition: Optional[Callable[[Dict[str, Any]], bool]] = None, + ) -> Iterable[str]: + """ + Find data in the database. + + Args: + metadata (dict): Metadata to search for. If not None, only data where all keys and values match the + metadata will be returned. + If None, returns all data which also matches the condition. + + condition (function): A function that takes a single argument (the metadata of a data entry) and returns True if the data entry should be returned. If None, ยจ + all data matching the metadata will be returned. + """ + for key in self.keys(): + metadata_for_key = self.get_metadata(key) + if metadata is not None: + if not self._metadata_matches(metadata, metadata_for_key): + continue + if condition is not None: + if not condition(metadata_for_key): + continue + yield key + + def _validate_key(self, key: str) -> None: + if not isinstance(key, str): + raise ValueError("Key must be a string.") + + def _validate_metadata(self, metadata: Dict[str, Any]) -> None: + for metadata_key, metadata_value in metadata.items(): + if not isinstance(metadata_key, str): + raise ValueError("Metadata keys must be strings.") + if not isinstance( + metadata_value, (str, int, float, bool, bytes, datetime.datetime) + ): + raise ValueError( + "Metadata values must be strings, ints, floats, bools, bytes or datetime objects." 
+ ) + if isinstance(metadata_value, datetime.datetime): + metadata[metadata_key] = {"datetime": metadata_value.isoformat()} + + def _metadata_matches( + self, metadata: Dict[str, Any], metadata_to_match: Dict[str, Any] + ) -> bool: + return all( + metadata_key in metadata_to_match + and metadata_value == metadata_to_match[metadata_key] + for metadata_key, metadata_value in metadata.items() + ) + + @staticmethod + def _convert_metadata(metadata: Dict[str, Any]) -> Dict[str, Any]: + converted_metadata = {} + for metadata_key, metadata_value in metadata.items(): + if isinstance(metadata_value, dict) and "datetime" in metadata_value: + converted_metadata[metadata_key] = datetime.datetime.fromisoformat( + metadata_value["datetime"] + ) + else: + converted_metadata[metadata_key] = metadata_value + return converted_metadata + + def close(self): + self._metadata_db.close() + self._data_db.close() diff --git a/laboneq/implementation/experiment_workflow/__init__.py b/laboneq/implementation/experiment_workflow/__init__.py new file mode 100644 index 0000000..f7acd1f --- /dev/null +++ b/laboneq/implementation/experiment_workflow/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from .experiment_workflow import ExperimentWorkflow diff --git a/laboneq/implementation/experiment_workflow/experiment_workflow.py b/laboneq/implementation/experiment_workflow/experiment_workflow.py index 16d567e..6d50b22 100644 --- a/laboneq/implementation/experiment_workflow/experiment_workflow.py +++ b/laboneq/implementation/experiment_workflow/experiment_workflow.py @@ -91,8 +91,8 @@ def run_payload(self, execution_payload: ExecutionPayload): """ Run an experiment job. """ - job_id = self._experiment_job_queue.submit_execution_payload(execution_payload) - return self._experiment_job_queue.run_job_result(job_id) + job_id = self._runner.submit_execution_payload(execution_payload) + return self._runner.run_job_result(job_id) def build_payload_for_current_experiment(self) -> ExecutionPayload: """ diff --git a/laboneq/implementation/legacy_adapters/__init.py__ b/laboneq/implementation/legacy_adapters/__init.py__ new file mode 100644 index 0000000..e69de29 diff --git a/laboneq/implementation/legacy_adapters/__init__.py b/laboneq/implementation/legacy_adapters/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/legacy_adapters/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/legacy_adapters/converters_calibration/__init__.py b/laboneq/implementation/legacy_adapters/converters_calibration/__init__.py new file mode 100644 index 0000000..afc77e6 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_calibration/__init__.py @@ -0,0 +1,214 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# converter functions for data type package 'calibration' +# AUTOGENERATED, DO NOT EDIT +from laboneq.core.types.enums.carrier_type import CarrierType as CarrierTypeDSL +from laboneq.core.types.enums.high_pass_compensation_clearing import ( + HighPassCompensationClearing as HighPassCompensationClearingDSL, +) +from laboneq.core.types.enums.modulation_type import ModulationType as ModulationTypeDSL +from laboneq.data.calibration import BounceCompensation as BounceCompensationDATA +from laboneq.data.calibration import Calibration as CalibrationDATA +from laboneq.data.calibration import CarrierType as 
CarrierTypeDATA +from laboneq.data.calibration import ( + ExponentialCompensation as ExponentialCompensationDATA, +) +from laboneq.data.calibration import FIRCompensation as FIRCompensationDATA +from laboneq.data.calibration import HighPassCompensation as HighPassCompensationDATA +from laboneq.data.calibration import MixerCalibration as MixerCalibrationDATA +from laboneq.data.calibration import ModulationType as ModulationTypeDATA +from laboneq.data.calibration import Oscillator as OscillatorDATA +from laboneq.data.calibration import Precompensation as PrecompensationDATA +from laboneq.data.calibration import Signal as SignalDATA +from laboneq.dsl.calibration.calibration import Calibration as CalibrationDSL +from laboneq.dsl.calibration.mixer_calibration import ( + MixerCalibration as MixerCalibrationDSL, +) +from laboneq.dsl.calibration.observable import Signal as SignalDSL +from laboneq.dsl.calibration.oscillator import Oscillator as OscillatorDSL +from laboneq.dsl.calibration.precompensation import ( + BounceCompensation as BounceCompensationDSL, +) +from laboneq.dsl.calibration.precompensation import ( + ExponentialCompensation as ExponentialCompensationDSL, +) +from laboneq.dsl.calibration.precompensation import ( + FIRCompensation as FIRCompensationDSL, +) +from laboneq.dsl.calibration.precompensation import ( + HighPassCompensation as HighPassCompensationDSL, +) +from laboneq.dsl.calibration.precompensation import ( + Precompensation as PrecompensationDSL, +) +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +from .post_process_calibration import post_process + + +def get_converter_function_calibration(orig): + converter_function_directory = { + BounceCompensationDSL: convert_BounceCompensation, + CalibrationDSL: convert_Calibration, + ExponentialCompensationDSL: convert_ExponentialCompensation, + FIRCompensationDSL: convert_FIRCompensation, + HighPassCompensationDSL: convert_HighPassCompensation, + MixerCalibrationDSL: convert_MixerCalibration, + OscillatorDSL: convert_Oscillator, + PrecompensationDSL: convert_Precompensation, + SignalDSL: convert_Signal, + } + return converter_function_directory.get(orig) + + +def convert_CarrierType(orig: CarrierTypeDSL): + return ( + next(e for e in CarrierTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_ModulationType(orig: ModulationTypeDSL): + return ( + next(e for e in ModulationTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_BounceCompensation(orig: BounceCompensationDSL): + if orig is None: + return None + retval = BounceCompensationDATA() + retval.amplitude = orig.amplitude + retval.delay = orig.delay + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_Calibration(orig: CalibrationDSL): + if orig is None: + return None + retval = CalibrationDATA() + retval.calibration_items = convert_dynamic( + orig.calibration_items, + source_type_string="Dict", + target_type_string="Dict", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_calibration, + ) + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_ExponentialCompensation(orig: ExponentialCompensationDSL): + if orig is None: + return None + retval = ExponentialCompensationDATA() + retval.amplitude = orig.amplitude + retval.timeconstant = orig.timeconstant + return post_process( + orig, retval, 
conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_FIRCompensation(orig: FIRCompensationDSL): + if orig is None: + return None + retval = FIRCompensationDATA() + retval.coefficients = convert_dynamic( + orig.coefficients, + source_type_string="ArrayLike", + target_type_string="ArrayLike", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_calibration, + ) + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_HighPassCompensation(orig: HighPassCompensationDSL): + if orig is None: + return None + retval = HighPassCompensationDATA() + retval.timeconstant = orig.timeconstant + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_MixerCalibration(orig: MixerCalibrationDSL): + if orig is None: + return None + retval = MixerCalibrationDATA() + retval.uid = orig.uid + retval.correction_matrix = convert_dynamic( + orig.correction_matrix, + source_type_string="List[List[float]]", + target_type_string="List[List[float]]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_calibration, + ) + retval.voltage_offsets = convert_dynamic( + orig.voltage_offsets, + source_type_string="List[float]", + target_type_string="List[float]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_calibration, + ) + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_Oscillator(orig: OscillatorDSL): + if orig is None: + return None + retval = OscillatorDATA() + retval.uid = orig.uid + retval.carrier_type = convert_CarrierType(orig.carrier_type) + retval.frequency = convert_dynamic( + orig.frequency, + source_type_string="Union[float, Parameter]", + target_type_string="Parameter", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_calibration, + ) + retval.modulation_type = convert_ModulationType(orig.modulation_type) + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_Precompensation(orig: PrecompensationDSL): + if orig is None: + return None + retval = PrecompensationDATA() + retval.bounce = convert_BounceCompensation(orig.bounce) + retval.high_pass = convert_HighPassCompensation(orig.high_pass) + retval.uid = orig.uid + retval.FIR = convert_FIRCompensation(orig.FIR) + retval.exponential = convert_dynamic( + orig.exponential, + source_type_string="List[ExponentialCompensation]", + target_type_string="List[ExponentialCompensation]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_calibration, + ) + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) + + +def convert_Signal(orig: SignalDSL): + if orig is None: + return None + retval = SignalDATA() + return post_process( + orig, retval, conversion_function_lookup=get_converter_function_calibration + ) diff --git a/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py b/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py new file mode 100644 index 0000000..9c25299 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py @@ -0,0 +1,6 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +def post_process(source, target): + return 
target diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py b/laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py new file mode 100644 index 0000000..73d7ea1 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py @@ -0,0 +1,654 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# converter functions for data type package 'experiment_description' +# AUTOGENERATED, DO NOT EDIT +from typing import Any as AnyDSL + +from laboneq.core.types.enums.acquisition_type import ( + AcquisitionType as AcquisitionTypeDSL, +) +from laboneq.core.types.enums.averaging_mode import AveragingMode as AveragingModeDSL +from laboneq.core.types.enums.execution_type import ExecutionType as ExecutionTypeDSL +from laboneq.core.types.enums.repetition_mode import RepetitionMode as RepetitionModeDSL +from laboneq.core.types.enums.section_alignment import ( + SectionAlignment as SectionAlignmentDSL, +) +from laboneq.data.experiment_description import Acquire as AcquireDATA +from laboneq.data.experiment_description import AcquireLoopNt as AcquireLoopNtDATA +from laboneq.data.experiment_description import AcquireLoopRt as AcquireLoopRtDATA +from laboneq.data.experiment_description import AcquisitionType as AcquisitionTypeDATA +from laboneq.data.experiment_description import Any as AnyDATA +from laboneq.data.experiment_description import AveragingMode as AveragingModeDATA +from laboneq.data.experiment_description import Call as CallDATA +from laboneq.data.experiment_description import Case as CaseDATA +from laboneq.data.experiment_description import Delay as DelayDATA +from laboneq.data.experiment_description import ExecutionType as ExecutionTypeDATA +from laboneq.data.experiment_description import Experiment as ExperimentDATA +from laboneq.data.experiment_description import ExperimentSignal as ExperimentSignalDATA +from laboneq.data.experiment_description import ( + LinearSweepParameter as LinearSweepParameterDATA, +) +from laboneq.data.experiment_description import Match as MatchDATA +from laboneq.data.experiment_description import Operation as OperationDATA +from laboneq.data.experiment_description import Optional as OptionalDATA +from laboneq.data.experiment_description import Parameter as ParameterDATA +from laboneq.data.experiment_description import PlayPulse as PlayPulseDATA +from laboneq.data.experiment_description import Pulse as PulseDATA +from laboneq.data.experiment_description import PulseFunctional as PulseFunctionalDATA +from laboneq.data.experiment_description import ( + PulseSampledComplex as PulseSampledComplexDATA, +) +from laboneq.data.experiment_description import PulseSampledReal as PulseSampledRealDATA +from laboneq.data.experiment_description import RepetitionMode as RepetitionModeDATA +from laboneq.data.experiment_description import Reserve as ReserveDATA +from laboneq.data.experiment_description import Section as SectionDATA +from laboneq.data.experiment_description import SectionAlignment as SectionAlignmentDATA +from laboneq.data.experiment_description import Set as SetDATA +from laboneq.data.experiment_description import ( + SignalCalibration as SignalCalibrationDATA, +) +from laboneq.data.experiment_description import Sweep as SweepDATA +from laboneq.data.experiment_description import SweepParameter as SweepParameterDATA +from laboneq.dsl.calibration.signal_calibration import ( + SignalCalibration as SignalCalibrationDSL, +) +from 
laboneq.dsl.experiment.acquire import Acquire as AcquireDSL +from laboneq.dsl.experiment.call import Call as CallDSL +from laboneq.dsl.experiment.delay import Delay as DelayDSL +from laboneq.dsl.experiment.experiment import Experiment as ExperimentDSL +from laboneq.dsl.experiment.experiment_signal import ( + ExperimentSignal as ExperimentSignalDSL, +) +from laboneq.dsl.experiment.operation import Operation as OperationDSL +from laboneq.dsl.experiment.play_pulse import PlayPulse as PlayPulseDSL +from laboneq.dsl.experiment.pulse import Pulse as PulseDSL +from laboneq.dsl.experiment.pulse import PulseFunctional as PulseFunctionalDSL +from laboneq.dsl.experiment.pulse import PulseSampledComplex as PulseSampledComplexDSL +from laboneq.dsl.experiment.pulse import PulseSampledReal as PulseSampledRealDSL +from laboneq.dsl.experiment.reserve import Reserve as ReserveDSL +from laboneq.dsl.experiment.section import AcquireLoopNt as AcquireLoopNtDSL +from laboneq.dsl.experiment.section import AcquireLoopRt as AcquireLoopRtDSL +from laboneq.dsl.experiment.section import Case as CaseDSL +from laboneq.dsl.experiment.section import Match as MatchDSL +from laboneq.dsl.experiment.section import Section as SectionDSL +from laboneq.dsl.experiment.section import Sweep as SweepDSL +from laboneq.dsl.experiment.set import Set as SetDSL +from laboneq.dsl.parameter import LinearSweepParameter as LinearSweepParameterDSL +from laboneq.dsl.parameter import Parameter as ParameterDSL +from laboneq.dsl.parameter import SweepParameter as SweepParameterDSL +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +from .post_process_experiment_description import post_process + + +def get_converter_function_experiment_description(orig): + converter_function_directory = { + AcquireDSL: convert_Acquire, + AcquireLoopNtDSL: convert_AcquireLoopNt, + AcquireLoopRtDSL: convert_AcquireLoopRt, + CallDSL: convert_Call, + CaseDSL: convert_Case, + DelayDSL: convert_Delay, + ExperimentDSL: convert_Experiment, + ExperimentSignalDSL: convert_ExperimentSignal, + LinearSweepParameterDSL: convert_LinearSweepParameter, + MatchDSL: convert_Match, + OperationDSL: convert_Operation, + ParameterDSL: convert_Parameter, + PlayPulseDSL: convert_PlayPulse, + PulseFunctionalDSL: convert_PulseFunctional, + PulseSampledComplexDSL: convert_PulseSampledComplex, + PulseSampledRealDSL: convert_PulseSampledReal, + ReserveDSL: convert_Reserve, + SectionDSL: convert_Section, + SetDSL: convert_Set, + SignalCalibrationDSL: convert_SignalCalibration, + SweepDSL: convert_Sweep, + SweepParameterDSL: convert_SweepParameter, + } + return converter_function_directory.get(orig) + + +def convert_AcquisitionType(orig: AcquisitionTypeDSL): + return ( + next(e for e in AcquisitionTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_RepetitionMode(orig: RepetitionModeDSL): + return ( + next(e for e in RepetitionModeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_SectionAlignment(orig: SectionAlignmentDSL): + return ( + next(e for e in SectionAlignmentDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_ExecutionType(orig: ExecutionTypeDSL): + return ( + next(e for e in ExecutionTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_AveragingMode(orig: AveragingModeDSL): + return ( + next(e for e in AveragingModeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_Acquire(orig: 
AcquireDSL): + if orig is None: + return None + retval = AcquireDATA() + retval.length = orig.length + retval.signal = orig.signal + retval.handle = orig.handle + retval.pulse_parameters = convert_dynamic( + orig.pulse_parameters, + source_type_string="Dict", + target_type_string="Any", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.kernel = convert_dynamic( + orig.kernel, + source_type_hint=PulseDSL, + target_type_hint=PulseDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_AcquireLoopNt(orig: AcquireLoopNtDSL): + if orig is None: + return None + retval = AcquireLoopNtDATA() + retval.uid = orig.uid + retval.execution_type = convert_ExecutionType(orig.execution_type) + retval.averaging_mode = convert_AveragingMode(orig.averaging_mode) + retval.count = orig.count + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_AcquireLoopRt(orig: AcquireLoopRtDSL): + if orig is None: + return None + retval = AcquireLoopRtDATA() + retval.repetition_time = orig.repetition_time + retval.execution_type = convert_ExecutionType(orig.execution_type) + retval.count = orig.count + retval.uid = orig.uid + retval.acquisition_type = convert_AcquisitionType(orig.acquisition_type) + retval.averaging_mode = convert_AveragingMode(orig.averaging_mode) + retval.repetition_mode = convert_RepetitionMode(orig.repetition_mode) + retval.reset_oscillator_phase = orig.reset_oscillator_phase + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Call(orig: CallDSL): + if orig is None: + return None + retval = CallDATA() + retval.func_name = convert_dynamic( + orig.func_name, + source_type_hint=AnyDSL, + target_type_hint=AnyDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.args = convert_dynamic( + orig.args, + source_type_string="Dict", + target_type_string="Dict", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Case(orig: CaseDSL): + if orig is None: + return None + retval = CaseDATA() + retval.uid = orig.uid + retval.state = orig.state + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Delay(orig: DelayDSL): + if orig is None: + return None + retval = DelayDATA() + retval.precompensation_clear = orig.precompensation_clear + retval.signal = orig.signal + retval.time = convert_dynamic( + orig.time, + source_type_string="Union[float, Parameter]", + target_type_string="Parameter", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Experiment(orig: ExperimentDSL): + if orig is None: + return None + retval = ExperimentDATA() + retval.sections = convert_dynamic( + orig.sections, + source_type_hint=SectionDSL, + target_type_hint=SectionDATA, + 
orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.signals = convert_dynamic( + orig.signals, + source_type_string="Union[Dict[str, ExperimentSignal], List[ExperimentSignal]]", + target_type_string="Union[Dict[str, ExperimentSignal], List[ExperimentSignal]]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.uid = orig.uid + retval.epsilon = orig.epsilon + retval._section_stack = convert_dynamic( + orig._section_stack, + source_type_hint=SectionDSL, + target_type_hint=SectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_ExperimentSignal(orig: ExperimentSignalDSL): + if orig is None: + return None + retval = ExperimentSignalDATA() + retval.uid = orig.uid + retval.calibration = convert_SignalCalibration(orig.calibration) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_LinearSweepParameter(orig: LinearSweepParameterDSL): + if orig is None: + return None + retval = LinearSweepParameterDATA() + retval.axis_name = orig.axis_name + retval.start = orig.start + retval.stop = orig.stop + retval.count = orig.count + retval.uid = orig.uid + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Match(orig: MatchDSL): + if orig is None: + return None + retval = MatchDATA() + retval.uid = orig.uid + retval.handle = orig.handle + retval.local = orig.local + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Operation(orig: OperationDSL): + if orig is None: + return None + retval = OperationDATA() + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Parameter(orig: ParameterDSL): + if orig is None: + return None + retval = ParameterDATA() + retval.uid = orig.uid + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_PlayPulse(orig: PlayPulseDSL): + if orig is None: + return None + retval = PlayPulseDATA() + retval.amplitude = convert_dynamic( + orig.amplitude, + source_type_string="Union[float, complex, Parameter]", + target_type_string="Union[float, complex, Parameter]", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.precompensation_clear = orig.precompensation_clear + retval.phase = orig.phase + retval.length = convert_dynamic( + orig.length, + source_type_string="Union[float, Parameter]", + target_type_string="Parameter", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.pulse = convert_dynamic( + orig.pulse, + source_type_hint=PulseDSL, + target_type_hint=PulseDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.pulse_parameters = convert_dynamic( + orig.pulse_parameters, + source_type_hint=AnyDSL, + target_type_hint=OptionalDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + 
retval.increment_oscillator_phase = convert_dynamic( + orig.increment_oscillator_phase, + source_type_string="Union[float, Parameter]", + target_type_string="Parameter", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.marker = convert_dynamic( + orig.marker, + source_type_string="Dict", + target_type_string="Dict", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.set_oscillator_phase = orig.set_oscillator_phase + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_PulseFunctional(orig: PulseFunctionalDSL): + if orig is None: + return None + retval = PulseFunctionalDATA() + retval.amplitude = orig.amplitude + retval.length = orig.length + retval.pulse_parameters = convert_dynamic( + orig.pulse_parameters, + source_type_hint=AnyDSL, + target_type_hint=OptionalDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.uid = orig.uid + retval.function = orig.function + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_PulseSampledComplex(orig: PulseSampledComplexDSL): + if orig is None: + return None + retval = PulseSampledComplexDATA() + retval.uid = orig.uid + retval.samples = convert_dynamic( + orig.samples, + source_type_string="ArrayLike", + target_type_string="ArrayLike", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_PulseSampledReal(orig: PulseSampledRealDSL): + if orig is None: + return None + retval = PulseSampledRealDATA() + retval.uid = orig.uid + retval.samples = convert_dynamic( + orig.samples, + source_type_string="ArrayLike", + target_type_string="ArrayLike", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Reserve(orig: ReserveDSL): + if orig is None: + return None + retval = ReserveDATA() + retval.signal = orig.signal + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Section(orig: SectionDSL): + if orig is None: + return None + retval = SectionDATA() + retval.execution_type = convert_ExecutionType(orig.execution_type) + retval.length = orig.length + retval.on_system_grid = orig.on_system_grid + retval.children = convert_dynamic( + orig.children, + source_type_hint=OperationDSL, + target_type_hint=OperationDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.alignment = convert_SectionAlignment(orig.alignment) + retval.uid = orig.uid + retval.play_after = convert_dynamic( + orig.play_after, + source_type_string="Union[str, List[str]]", + target_type_string="List[str]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.trigger = convert_dynamic( + orig.trigger, + source_type_string="Dict[str, Dict]", + target_type_string="Dict", + orig_is_collection=True, + 
conversion_function_lookup=get_converter_function_experiment_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Set(orig: SetDSL): + if orig is None: + return None + retval = SetDATA() + retval.value = convert_dynamic( + orig.value, + source_type_hint=AnyDSL, + target_type_hint=AnyDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.path = orig.path + retval.key = orig.key + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_SignalCalibration(orig: SignalCalibrationDSL): + if orig is None: + return None + retval = SignalCalibrationDATA() + retval.local_oscillator = orig.local_oscillator + retval.amplitude = convert_dynamic( + orig.amplitude, + source_type_string="float", + target_type_string="Any", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.oscillator = orig.oscillator + retval.precompensation = orig.precompensation + retval.delay_signal = convert_dynamic( + orig.delay_signal, + source_type_string="float", + target_type_string="Any", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.mixer_calibration = orig.mixer_calibration + retval.voltage_offset = convert_dynamic( + orig.voltage_offset, + source_type_string="float", + target_type_string="Any", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.range = convert_dynamic( + orig.range, + source_type_hint=AnyDSL, + target_type_hint=AnyDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.port_delay = convert_dynamic( + orig.port_delay, + source_type_string="float", + target_type_string="Any", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.threshold = convert_dynamic( + orig.threshold, + source_type_string="float", + target_type_string="Any", + orig_is_collection=False, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.port_mode = orig.port_mode + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_Sweep(orig: SweepDSL): + if orig is None: + return None + retval = SweepDATA() + retval.parameters = convert_dynamic( + orig.parameters, + source_type_hint=ParameterDSL, + target_type_hint=ParameterDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.uid = orig.uid + retval.execution_type = convert_ExecutionType(orig.execution_type) + retval.reset_oscillator_phase = orig.reset_oscillator_phase + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_description, + ) + + +def convert_SweepParameter(orig: SweepParameterDSL): + if orig is None: + return None + retval = SweepParameterDATA() + retval.values = convert_dynamic( + orig.values, + source_type_string="ArrayLike", + target_type_string="ArrayLike", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_description, + ) + retval.axis_name = orig.axis_name + retval.uid = orig.uid + return post_process( + orig, + retval, + 
conversion_function_lookup=get_converter_function_experiment_description, + ) diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_description/post_process_experiment_description.py b/laboneq/implementation/legacy_adapters/converters_experiment_description/post_process_experiment_description.py new file mode 100644 index 0000000..0062f00 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_experiment_description/post_process_experiment_description.py @@ -0,0 +1,50 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import logging + +from laboneq.data.experiment_description import Experiment, PlayPulse +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +_logger = logging.getLogger(__name__) + +PULSES = {} + + +def post_process(source, target, conversion_function_lookup): + global PULSES + + if type(source).__name__ in ["Section", "AcquireLoopNt", "AcquireLoopRt"]: + _logger.info( + f"Converting {type(source).__name__} {source},\n converting children" + ) + if source.children is not None: + target.children = convert_dynamic( + source.children, + source_type_string="List", + conversion_function_lookup=conversion_function_lookup, + ) + if source.trigger is not None: + target.trigger = convert_dynamic( + source.trigger, + source_type_string="Dict", + conversion_function_lookup=conversion_function_lookup, + ) + return target + + if type(target) == Experiment: + _logger.info(f"Postprocess_experiment for {source.uid}") + target.pulses = list(PULSES.values()) + PULSES = {} + return target + + if type(target) == PlayPulse: + _logger.info(f"Postprocess_experiment for {source}") + if source.pulse.uid not in PULSES: + PULSES[source.pulse.uid] = conversion_function_lookup(type(source.pulse))( + source.pulse + ) + target.pulse = PULSES[source.pulse.uid] + target.signal_uid = source.signal + + return target diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py b/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py new file mode 100644 index 0000000..feb2c5c --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py @@ -0,0 +1,49 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# converter functions for data type package 'experiment_results' +# AUTOGENERATED, DO NOT EDIT +from laboneq.data.experiment_results import AcquiredResult as AcquiredResultDATA +from laboneq.dsl.result.acquired_result import AcquiredResult as AcquiredResultDSL +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +from .post_process_experiment_results import post_process + + +def get_converter_function_experiment_results(orig): + converter_function_directory = { + AcquiredResultDSL: convert_AcquiredResult, + } + return converter_function_directory.get(orig) + + +def convert_AcquiredResult(orig: AcquiredResultDSL): + if orig is None: + return None + retval = AcquiredResultDATA() + retval.last_nt_step = convert_dynamic( + orig.last_nt_step, + source_type_string="List[int]", + target_type_string="List[int]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_results, + ) + retval.axis_name = convert_dynamic( + orig.axis_name, + source_type_string="List[Union[str, List[str]]]", + target_type_string="List", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_results, + ) + 
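The post_process hook for the experiment description above also deduplicates pulses: every PlayPulse referring to the same pulse uid receives the same converted instance, and the Experiment-level pass drains the accumulated cache into Experiment.pulses before resetting it. A hedged sketch of that uid-keyed memoization, with illustrative names rather than the actual API:

from dataclasses import dataclass


@dataclass
class Pulse:  # illustrative stand-in
    uid: str


_cache = {}


def convert_once(pulse, convert):
    # convert each uid at most once so all references share one instance
    if pulse.uid not in _cache:
        _cache[pulse.uid] = convert(pulse)
    return _cache[pulse.uid]


p = Pulse(uid="pi_pulse")
a = convert_once(p, lambda x: Pulse(uid=x.uid))
b = convert_once(p, lambda x: Pulse(uid=x.uid))
assert a is b  # two PlayPulse operations would share the same converted pulse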
retval.data = convert_dynamic( + orig.data, + source_type_string="ArrayLike", + target_type_string="ArrayLike", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_experiment_results, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_experiment_results, + ) diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py b/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py new file mode 100644 index 0000000..9c25299 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py @@ -0,0 +1,6 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +def post_process(source, target): + return target diff --git a/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/__init__.py b/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/__init__.py new file mode 100644 index 0000000..15fd0ab --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/__init__.py @@ -0,0 +1,120 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# converter functions for data type package 'scheduled_experiment' +# AUTOGENERATED, DO NOT EDIT +from typing import Any as AnyDSL + +from laboneq.core.types.compiled_experiment import PulseInstance as PulseInstanceDSL +from laboneq.core.types.compiled_experiment import PulseMapEntry as PulseMapEntryDSL +from laboneq.core.types.compiled_experiment import ( + PulseWaveformMap as PulseWaveformMapDSL, +) +from laboneq.core.types.enums.mixer_type import MixerType as MixerTypeDSL +from laboneq.data.scheduled_experiment import MixerType as MixerTypeDATA +from laboneq.data.scheduled_experiment import Optional as OptionalDATA +from laboneq.data.scheduled_experiment import PulseInstance as PulseInstanceDATA +from laboneq.data.scheduled_experiment import PulseMapEntry as PulseMapEntryDATA +from laboneq.data.scheduled_experiment import PulseWaveformMap as PulseWaveformMapDATA +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +from .post_process_scheduled_experiment import post_process + + +def get_converter_function_scheduled_experiment(orig): + converter_function_directory = { + PulseInstanceDSL: convert_PulseInstance, + PulseMapEntryDSL: convert_PulseMapEntry, + PulseWaveformMapDSL: convert_PulseWaveformMap, + } + return converter_function_directory.get(orig) + + +def convert_MixerType(orig: MixerTypeDSL): + return ( + next(e for e in MixerTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_PulseInstance(orig: PulseInstanceDSL): + if orig is None: + return None + retval = PulseInstanceDATA() + retval.amplitude = orig.amplitude + retval.modulation_phase = orig.modulation_phase + retval.pulse_pulse_parameters = convert_dynamic( + orig.pulse_pulse_parameters, + source_type_hint=AnyDSL, + target_type_hint=OptionalDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + retval.length = orig.length + retval.has_marker2 = orig.has_marker2 + retval.channel = orig.channel + retval.has_marker1 = orig.has_marker1 + retval.needs_conjugate = orig.needs_conjugate + retval.modulation_frequency = orig.modulation_frequency + retval.iq_phase = orig.iq_phase + retval.overlaps = convert_dynamic( + orig.overlaps, + 
source_type_string="List[str]", + target_type_string="List[str]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + retval.offset_samples = orig.offset_samples + retval.play_pulse_parameters = convert_dynamic( + orig.play_pulse_parameters, + source_type_hint=AnyDSL, + target_type_hint=OptionalDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + + +def convert_PulseMapEntry(orig: PulseMapEntryDSL): + if orig is None: + return None + retval = PulseMapEntryDATA() + retval.waveforms = convert_dynamic( + orig.waveforms, + source_type_hint=PulseWaveformMapDSL, + target_type_hint=PulseWaveformMapDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + + +def convert_PulseWaveformMap(orig: PulseWaveformMapDSL): + if orig is None: + return None + retval = PulseWaveformMapDATA() + retval.sampling_rate = orig.sampling_rate + retval.length_samples = orig.length_samples + retval.instances = convert_dynamic( + orig.instances, + source_type_hint=PulseInstanceDSL, + target_type_hint=PulseInstanceDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) + retval.signal_type = orig.signal_type + retval.mixer_type = convert_MixerType(orig.mixer_type) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_scheduled_experiment, + ) diff --git a/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/post_process_scheduled_experiment.py b/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/post_process_scheduled_experiment.py new file mode 100644 index 0000000..9c25299 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_scheduled_experiment/post_process_scheduled_experiment.py @@ -0,0 +1,6 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +def post_process(source, target): + return target diff --git a/laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py b/laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py new file mode 100644 index 0000000..649acf1 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py @@ -0,0 +1,425 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# converter functions for data type package 'setup_description' +# AUTOGENERATED, DO NOT EDIT +from typing import Any as AnyDSL + +from laboneq.core.types.enums.io_direction import IODirection as IODirectionDSL +from laboneq.core.types.enums.io_signal_type import IOSignalType as IOSignalTypeDSL +from laboneq.core.types.enums.port_mode import PortMode as PortModeDSL +from laboneq.core.types.enums.reference_clock_source import ( + ReferenceClockSource as ReferenceClockSourceDSL, +) +from laboneq.data.setup_description import Any as AnyDATA +from laboneq.data.setup_description import Connection as ConnectionDATA +from laboneq.data.setup_description import Instrument as HDAWGDATA +from laboneq.data.setup_description import Instrument as InstrumentDATA +from laboneq.data.setup_description import Instrument as PQSCDATA +from 
laboneq.data.setup_description import Instrument as SHFQADATA +from laboneq.data.setup_description import Instrument as SHFSGDATA +from laboneq.data.setup_description import Instrument as UHFQADATA +from laboneq.data.setup_description import IODirection as IODirectionDATA +from laboneq.data.setup_description import IOSignalType as IOSignalTypeDATA +from laboneq.data.setup_description import LogicalSignal as LogicalSignalDATA +from laboneq.data.setup_description import LogicalSignalGroup as LogicalSignalGroupDATA +from laboneq.data.setup_description import PhysicalChannel as PhysicalChannelDATA +from laboneq.data.setup_description import ( + PhysicalChannelType as PhysicalChannelTypeDATA, +) +from laboneq.data.setup_description import Port as PortDATA +from laboneq.data.setup_description import PortMode as PortModeDATA +from laboneq.data.setup_description import QuantumElement as QuantumElementDATA +from laboneq.data.setup_description import Qubit as QubitDATA +from laboneq.data.setup_description import ( + ReferenceClockSource as ReferenceClockSourceDATA, +) +from laboneq.data.setup_description import Server as DataServerDATA +from laboneq.data.setup_description import Server as ServerDATA +from laboneq.data.setup_description import Setup as DeviceSetupDATA +from laboneq.dsl.device.connection import Connection as ConnectionDSL +from laboneq.dsl.device.device_setup import DeviceSetup as DeviceSetupDSL +from laboneq.dsl.device.instrument import Instrument as InstrumentDSL +from laboneq.dsl.device.instruments.hdawg import HDAWG as HDAWGDSL +from laboneq.dsl.device.instruments.pqsc import PQSC as PQSCDSL +from laboneq.dsl.device.instruments.shfqa import SHFQA as SHFQADSL +from laboneq.dsl.device.instruments.shfsg import SHFSG as SHFSGDSL +from laboneq.dsl.device.instruments.uhfqa import UHFQA as UHFQADSL +from laboneq.dsl.device.io_units.logical_signal import LogicalSignal as LogicalSignalDSL +from laboneq.dsl.device.io_units.physical_channel import ( + PhysicalChannel as PhysicalChannelDSL, +) +from laboneq.dsl.device.io_units.physical_channel import ( + PhysicalChannelType as PhysicalChannelTypeDSL, +) +from laboneq.dsl.device.logical_signal_group import ( + LogicalSignalGroup as LogicalSignalGroupDSL, +) +from laboneq.dsl.device.ports import Port as PortDSL +from laboneq.dsl.device.server import Server as ServerDSL +from laboneq.dsl.device.servers.data_server import DataServer as DataServerDSL +from laboneq.dsl.quantum.qubits import QuantumElement as QuantumElementDSL +from laboneq.dsl.quantum.qubits import Qubit as QubitDSL +from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic + +from .post_process_setup_description import post_process + + +def get_converter_function_setup_description(orig): + converter_function_directory = { + ConnectionDSL: convert_Connection, + DataServerDSL: convert_DataServer, + DeviceSetupDSL: convert_DeviceSetup, + HDAWGDSL: convert_HDAWG, + InstrumentDSL: convert_Instrument, + LogicalSignalDSL: convert_LogicalSignal, + LogicalSignalGroupDSL: convert_LogicalSignalGroup, + PQSCDSL: convert_PQSC, + PhysicalChannelDSL: convert_PhysicalChannel, + PortDSL: convert_Port, + QuantumElementDSL: convert_QuantumElement, + QubitDSL: convert_Qubit, + SHFQADSL: convert_SHFQA, + SHFSGDSL: convert_SHFSG, + ServerDSL: convert_Server, + UHFQADSL: convert_UHFQA, + } + return converter_function_directory.get(orig) + + +def convert_IOSignalType(orig: IOSignalTypeDSL): + return ( + next(e for e in IOSignalTypeDATA if e.name == orig.name) + if orig is not 
None + else None + ) + + +def convert_PortMode(orig: PortModeDSL): + return ( + next(e for e in PortModeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_PhysicalChannelType(orig: PhysicalChannelTypeDSL): + return ( + next(e for e in PhysicalChannelTypeDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_ReferenceClockSource(orig: ReferenceClockSourceDSL): + return ( + next(e for e in ReferenceClockSourceDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_IODirection(orig: IODirectionDSL): + return ( + next(e for e in IODirectionDATA if e.name == orig.name) + if orig is not None + else None + ) + + +def convert_Connection(orig: ConnectionDSL): + if orig is None: + return None + retval = ConnectionDATA() + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_DataServer(orig: DataServerDSL): + if orig is None: + return None + retval = DataServerDATA() + retval.api_level = orig.api_level + retval.port = convert_dynamic( + orig.port, + source_type_hint=AnyDSL, + target_type_hint=AnyDATA, + orig_is_collection=False, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.host = orig.host + retval.uid = orig.uid + retval.leader_uid = orig.leader_uid + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_DeviceSetup(orig: DeviceSetupDSL): + if orig is None: + return None + retval = DeviceSetupDATA() + retval.logical_signal_groups = convert_dynamic( + orig.logical_signal_groups, + source_type_hint=LogicalSignalGroupDSL, + target_type_hint=LogicalSignalGroupDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.instruments = convert_dynamic( + orig.instruments, + source_type_hint=InstrumentDSL, + target_type_hint=InstrumentDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.servers = convert_dynamic( + orig.servers, + source_type_hint=DataServerDSL, + target_type_hint=ServerDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.uid = orig.uid + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_HDAWG(orig: HDAWGDSL): + if orig is None: + return None + retval = HDAWGDATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.address = orig.address + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_Instrument(orig: InstrumentDSL): + if orig is None: + return None + retval = InstrumentDATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_LogicalSignal(orig: 
LogicalSignalDSL): + if orig is None: + return None + retval = LogicalSignalDATA() + retval.uid = orig.uid + retval.name = orig.name + retval.path = orig.path + retval.direction = convert_IODirection(orig.direction) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_LogicalSignalGroup(orig: LogicalSignalGroupDSL): + if orig is None: + return None + retval = LogicalSignalGroupDATA() + retval.uid = orig.uid + retval.logical_signals = convert_dynamic( + orig.logical_signals, + source_type_string="Dict", + target_type_string="Dict[str,LogicalSignal]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_PQSC(orig: PQSCDSL): + if orig is None: + return None + retval = PQSCDATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.address = orig.address + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_PhysicalChannel(orig: PhysicalChannelDSL): + if orig is None: + return None + retval = PhysicalChannelDATA() + retval.uid = orig.uid + retval.type = convert_PhysicalChannelType(orig.type) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_Port(orig: PortDSL): + if orig is None: + return None + retval = PortDATA() + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_QuantumElement(orig: QuantumElementDSL): + if orig is None: + return None + retval = QuantumElementDATA() + retval.parameters = convert_dynamic( + orig.parameters, + source_type_string="Dict", + target_type_string="List", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.uid = orig.uid + retval.signals = convert_dynamic( + orig.signals, + source_type_string="Dict", + target_type_string="List[LogicalSignal]", + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_Qubit(orig: QubitDSL): + if orig is None: + return None + retval = QubitDATA() + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_SHFQA(orig: SHFQADSL): + if orig is None: + return None + retval = SHFQADATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.address = orig.address + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_SHFSG(orig: SHFSGDSL): + if orig is None: + return None + retval = SHFSGDATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = 
convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.address = orig.address + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_Server(orig: ServerDSL): + if orig is None: + return None + retval = ServerDATA() + retval.uid = orig.uid + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) + + +def convert_UHFQA(orig: UHFQADSL): + if orig is None: + return None + retval = UHFQADATA() + retval.uid = orig.uid + retval.interface = orig.interface + retval.connections = convert_dynamic( + orig.connections, + source_type_hint=ConnectionDSL, + target_type_hint=ConnectionDATA, + orig_is_collection=True, + conversion_function_lookup=get_converter_function_setup_description, + ) + retval.address = orig.address + return post_process( + orig, + retval, + conversion_function_lookup=get_converter_function_setup_description, + ) diff --git a/laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py b/laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py new file mode 100644 index 0000000..8b5aaa0 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py @@ -0,0 +1,153 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from laboneq.data.setup_description import ( + Connection, + DeviceType, + LogicalSignalGroup, + PhysicalChannel, + PhysicalChannelType, + Port, + Setup, + SetupInternalConnection, +) +from laboneq.dsl.device.instrument import Instrument as InstrumentDSL +from laboneq.dsl.device.io_units.physical_channel import ( + PhysicalChannelType as PhysicalChannelTypeDSL, +) + +in_postprocess = False + + +def post_process(source, target, conversion_function_lookup): + global in_postprocess + if in_postprocess: + return target + + device_types = {e.name: e for e in DeviceType} + if type(source).__name__ in device_types: + in_postprocess = True + retval = conversion_function_lookup(InstrumentDSL)(source) + in_postprocess = False + retval.address = source.address + retval.device_type = device_types[type(source).__name__] + retval.server = source.server_uid + return retval + + if type(target) == Setup: + post_process_setup(source, target) + + if type(target) == LogicalSignalGroup: + target.logical_signals = { + ls.uid.split("/")[1]: ls for ls in target.logical_signals + } + return target + + +def is_node_path_in_physical_channel(node_path, physical_channel): + path_split = node_path.split("/") + first_part_of_node_path = path_split[0] + first_part_of_pc_name = physical_channel.name.split("_")[0] + if not first_part_of_node_path.lower() == first_part_of_pc_name: + return False + if len(path_split) >= 3: + last_part_of_node_path = path_split[-1] + last_part_of_pc_name = physical_channel.name.split("_")[-1] + if not last_part_of_node_path.lower() == last_part_of_pc_name: + return False + + channel_numbers_in_pc_name = [ + int(s) for s in physical_channel.name.split("_") if s.isdecimal() + ] + if len(channel_numbers_in_pc_name) == 0: + return False + channel_numbers_in_node_path = [ + int(s) for s in node_path.split("/") if s.isdecimal() + ] + if len(channel_numbers_in_node_path) > 1: + return False + if
len(channel_numbers_in_node_path) == 0: + return False + if channel_numbers_in_node_path[0] not in channel_numbers_in_pc_name: + return False + return True + + +def post_process_setup(dsl_setup, data_setup): + data_instrument_map = {i.uid: i for i in data_setup.instruments} + dsl_instrument_map = {i.uid: i for i in dsl_setup.instruments} + all_pcs = {} + for device_id, pcg in dsl_setup.physical_channel_groups.items(): + for pc_uid, pc in pcg.channels.items(): + all_pcs[(device_id, pc.name)] = pc + + all_ls = {} + for lsg in data_setup.logical_signal_groups: + for ls in lsg.logical_signals.values(): + all_ls[ls.path] = ls + + for i in data_setup.instruments: + server_uid = i.server + if server_uid is not None: + i.server = next(s for s in data_setup.servers if s.uid == server_uid) + + i.physical_channels = [ + PhysicalChannel( + uid=pc.name, + type=PhysicalChannelType.IQ_CHANNEL + if pc.type == PhysicalChannelTypeDSL.IQ_CHANNEL + else PhysicalChannelType.RF_CHANNEL, + ) + for k, pc in all_pcs.items() + if k[0] == i.uid + ] + + i.ports = [] + i.connections = [] + pcs_of_instrument = [pc for k, pc in all_pcs.items() if k[0] == i.uid] + + for c in dsl_instrument_map[i.uid].connections: + node_path = c.local_port + + pc_of_connection = next( + ( + pc + for pc in pcs_of_instrument + if is_node_path_in_physical_channel(node_path, pc) + ), + None, + ) + + if pc_of_connection is not None: + pc_of_connection = next( + ( + pc + for pc in i.physical_channels + if pc.uid == pc_of_connection.name + ), + None, + ) + + current_port = Port(path=c.local_port, physical_channel=pc_of_connection) + i.ports.append(current_port) + + if c.remote_path in all_ls and pc_of_connection is not None: + i.connections.append( + Connection( + physical_channel=pc_of_connection, + logical_signal=all_ls[c.remote_path], + ) + ) + elif c.remote_path in data_instrument_map: + data_setup.setup_internal_connections.append( + SetupInternalConnection( + from_instrument=i, + to_instrument=data_instrument_map[c.remote_path], + from_port=current_port, + ) + ) + + data_setup.servers = {s.uid: s for s in data_setup.servers} + data_setup.logical_signal_groups = { + lsg.uid: lsg for lsg in data_setup.logical_signal_groups + } diff --git a/laboneq/implementation/legacy_adapters/dynamic_converter.py b/laboneq/implementation/legacy_adapters/dynamic_converter.py new file mode 100644 index 0000000..73fe9c9 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/dynamic_converter.py @@ -0,0 +1,77 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import logging + +_logger = logging.getLogger(__name__) + + +def convert_dynamic( + source_object, + source_type_hint=None, + source_type_string=None, + target_type_hint=None, + target_type_string=None, + orig_is_collection=False, + conversion_function_lookup=None, +): + if source_object is None: + return None + if type(source_object) in [int, float, str, bool]: + return source_object + + if source_type_string == "Dict" and target_type_string == "Dict": + _logger.info(f"Converting Dict with {len(source_object)} elements") + retval = {} + for k, v in source_object.items(): + conversion_function = conversion_function_lookup(type(v)) + + if conversion_function is not None: + retval[k] = conversion_function(v) + else: + retval[k] = v + return retval + + conversion_function = conversion_function_lookup(type(source_object)) + if conversion_function is not None: + # _logger.info(f"Found conversion function for type {type(source_object)}") + return 
conversion_function(source_object) + + if source_type_string == "List": + _logger.info(f"Converting List with {len(source_object)} elements") + retval = [] + for s in source_object: + conversion_function = conversion_function_lookup(type(s)) + retval.append(conversion_function(s)) + return retval + + if orig_is_collection: + retval = [] + if source_object is not None: + _logger.info( + f"Converting collection with {len(source_object)} items for type {source_type_hint} to type {target_type_hint}" + ) + if isinstance(source_object, dict): + source_collection = source_object.values() + else: + source_collection = source_object + + for s in source_collection: + conversion_function = conversion_function_lookup(type(s)) + if conversion_function is None: + # _logger.info( + # f"Conversion function not found for type {type(s)}, looking up by type hint {source_type_hint}" + # ) + conversion_function = conversion_function_lookup(source_type_hint) + if conversion_function is None: + raise Exception( + f"Conversion function not found for type hint {source_type_hint}" + ) + + retval.append(conversion_function(s)) + _logger.info(f"List converted with {len(retval)} elements") + return retval + _logger.info( + f"No conversion applied; returning None. Source object type: {type(source_object)}, source_type_hint: {source_type_hint}, source_type_string: {source_type_string}, target_type_hint: {target_type_hint}, target_type_string: {target_type_string}, orig_is_collection: {orig_is_collection}, conversion_function_lookup: {conversion_function_lookup}" + ) + return None diff --git a/laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py b/laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py new file mode 100644 index 0000000..41b2fb7 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py @@ -0,0 +1,271 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import laboneq.dsl.experiment.pulse_library +from laboneq.application_management.application_manager import ApplicationManager +from laboneq.data.execution_payload import ExecutionPayload +from laboneq.data.experiment_description import ( + AcquireLoopRt, + ExecutionType, + Experiment, + PlayPulse, + PulseFunctional, + Section, + Sweep, +) +from laboneq.data.experiment_results import ExperimentResults +from laboneq.data.scheduled_experiment import ScheduledExperiment +from laboneq.data.setup_description import Setup +from laboneq.data.setup_description.setup_helper import SetupHelper +from laboneq.interfaces.experiment.experiment_api import ExperimentAPI + + +class OscillatorGetter: + def __init__(self, logical_signal): + self.logical_signal = logical_signal + + def __setattr__(self, __name: str, __value): + if hasattr(self, "logical_signal"): + if hasattr(self.logical_signal, "calibration"): + if hasattr(self.logical_signal.calibration.oscillator, __name): + self.logical_signal.calibration.oscillator.__setattr__( + __name, __value + ) + return + self.__dict__[__name] = __value + + +class ExperimentResultsAdapter: + def __init__(self, experiment_results): + self.experiment_results = experiment_results + self.compiled_experiment = ScheduledExperiment() + + def __getattr__(self, __name: str): + if hasattr(self.experiment_results, __name): + return self.experiment_results.__getattribute__(__name) + else: + if __name == "device_setup": + return None + if __name == "get_data": + return self.get_data + if __name == "get_axis": + return self.get_axis + + def get_data(self,
*args, **kwargs): + return self.compiled_experiment + + def get_axis(self, *args, **kwargs): + return [0] + + def get_axis_name(self, *args, **kwargs): + return ["dummy"] + + +class DeviceSetupAdapter: + @staticmethod + def from_descriptor( + yaml_text: str, + server_host: str = None, + server_port: str = None, + setup_name: str = None, + ): + from laboneq.application_management.application_manager import ( + ApplicationManager, + ) + + l1q: ExperimentAPI = ApplicationManager.instance().laboneq() + retval = l1q.device_setup_from_descriptor( + yaml_text, server_host, server_port, setup_name + ) + for ls in SetupHelper.flat_logical_signals(retval): + # each entry is a (name, logical_signal) pair; wrap the signal itself + ls[1].oscillator = OscillatorGetter(ls[1]) + return retval + + +class SignalCalibrationAdapter: + def __init__(self, *args, **kwargs): + pass + + +class LegacySessionAdapter: + def __init__(self, device_setup: Setup): + self.compiled_experiment = ScheduledExperiment() + self.results = ExperimentResultsAdapter(ExperimentResults()) + app_manager = ApplicationManager.instance() + self.l1q = app_manager.laboneq() + self.l1q.set_current_setup(device_setup) + self.do_emulation = False + + def connect(self, do_emulation: bool = False, ignore_version_mismatch=False): + self.do_emulation = do_emulation + + def compile(self, experiment: Experiment) -> ExecutionPayload: + self.l1q.set_current_experiment(experiment.data_experiment) + self.l1q.map_signals(experiment.signal_mappings) + self.compiled_experiment = self.l1q.build_payload_for_current_experiment() + return self.compiled_experiment + + def run(self, compiled_experiment: ExecutionPayload = None): + if compiled_experiment is None: + compiled_experiment = self.compiled_experiment + self.results = self.l1q.run_payload(compiled_experiment) + return self.results + + def get_results(self, *args, **kwargs): + return self.results + + +class ExperimentAdapter: + def __init__(self, uid=None, signals=None): + self.uid = uid + if signals is None: + signals = [] + + self.signal_mappings = {} + self.data_experiment = Experiment() + self.data_experiment.signals = signals + self._section_stack = [] + + def map_signal(self, experiment_signal_uid: str, logical_signal): + if experiment_signal_uid not in {s.uid for s in self.data_experiment.signals}: + raise ValueError( + "Signal {} not found in experiment".format(experiment_signal_uid) + ) + self.signal_mappings[experiment_signal_uid] = logical_signal.path + + def sweep(self, uid=None, parameter=None): + section = Sweep(uid=uid, parameters=[parameter]) + return SectionContext(self, section) + + def acquire_loop_rt( + self, + uid=None, + acquisition_type=None, + averaging_mode=None, + count=None, + repetition_mode=None, + repetition_time=None, + reset_oscillator_phase=False, + ): + section = AcquireLoopRt( + uid=uid, + acquisition_type=acquisition_type, + averaging_mode=averaging_mode, + count=count, + execution_type=ExecutionType.REAL_TIME, + repetition_mode=repetition_mode, + repetition_time=repetition_time, + reset_oscillator_phase=reset_oscillator_phase, + ) + return SectionContext(self, section) + + def section(self, uid=None, execution_type=None): + section = Section(uid=uid, execution_type=execution_type) + return SectionContext(self, section) + + def play( + self, + signal, + pulse, + amplitude=None, + phase=None, + increment_oscillator_phase=None, + set_oscillator_phase=None, + length=None, + pulse_parameters=None, + precompensation_clear=None, + marker=None, + ): + self._register_pulse(pulse) + operation = PlayPulse( + signal_uid=signal, + pulse=pulse, + amplitude=amplitude,
increment_oscillator_phase=increment_oscillator_phase, + phase=phase, + set_oscillator_phase=set_oscillator_phase, + length=length, + pulse_parameters=pulse_parameters, + precompensation_clear=precompensation_clear, + marker=marker, + ) + self._push_operation(operation) + + def acquire(self, *args, **kwargs): + return self + + def _push_section(self, section): + if section.execution_type is None: + if self._section_stack: + parent_section = self._peek_section() + execution_type = parent_section.execution_type + else: + execution_type = ExecutionType.NEAR_TIME + section.execution_type = execution_type + self._section_stack.append(section) + + def _push_operation(self, operation): + section = self._peek_section() + section.children.append(operation) + + def _pop_and_add_section(self): + if not self._section_stack: + raise ValueError( + "Internal error: Section stack should not be empty. Unbalanced push/pop." + ) + section = self._section_stack.pop() + self._add_section_to_current_section(section) + + def _add_section_to_current_section(self, section): + if not self._section_stack: + self.data_experiment.sections.append(section) + else: + current_section = self._section_stack[-1] + current_section.children.append(section) + + def _peek_section(self): + if not self._section_stack: + raise ValueError( + "No section in experiment. Use 'with your_exp.section(...):' to create a section scope first." + ) + return self._section_stack[-1] + + def _peek_rt_section(self): + if not self._section_stack: + raise ValueError( + "No section in experiment. Use 'with your_exp.section(...):' to create a section scope first." + ) + for s in reversed(self._section_stack): + if s.execution_type == ExecutionType.REAL_TIME: + return s + raise ValueError( + "No surrounding realtime section in experiment. Use 'with your_exp.acquire_loop_rt(...):' to create a section scope first." + ) + + def _register_pulse(self, pulse): + if pulse.uid is None: + pulse.uid = "pulse_{}".format(len(self.data_experiment.pulses)) + if pulse.uid in {p.uid for p in self.data_experiment.pulses}: + return + self.data_experiment.pulses.append(pulse) + return pulse + + +class SectionContext: + def __init__(self, experiment, section): + self.exp = experiment + self.section = section + + def __enter__(self): + self.exp._push_section(self.section) + return self.section + + def __exit__(self, exc_type, exc_val, exc_tb): + self.exp._pop_and_add_section() + + +class pulse_library: + @staticmethod + def const(uid=None, length=None, amplitude=None): + return PulseFunctional( + uid=uid, length=length, amplitude=amplitude, function="const" + ) diff --git a/laboneq/implementation/legacy_adapters/simple2.py b/laboneq/implementation/legacy_adapters/simple2.py new file mode 100644 index 0000000..76b8ac2 --- /dev/null +++ b/laboneq/implementation/legacy_adapters/simple2.py @@ -0,0 +1,33 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +# ruff: noqa +""" +Convenience header for the LabOne Q project. 
+""" + +from laboneq.data.calibration import ( + CarrierType, + MixerCalibration, + ModulationType, + Oscillator, +) +from laboneq.data.experiment_description import ( + AcquisitionType, + AveragingMode, + ExperimentSignal, + LinearSweepParameter, +) +from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( + DeviceSetupAdapter as DeviceSetup, +) +from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( + ExperimentAdapter as Experiment, +) +from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( + LegacySessionAdapter as Session, +) +from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( + SignalCalibrationAdapter as SignalCalibration, +) +from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import pulse_library diff --git a/laboneq/implementation/payload_builder/__init__.py b/laboneq/implementation/payload_builder/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/implementation/payload_builder/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/implementation/payload_builder/payload_builder.py b/laboneq/implementation/payload_builder/payload_builder.py index 69b602e..b5631a8 100644 --- a/laboneq/implementation/payload_builder/payload_builder.py +++ b/laboneq/implementation/payload_builder/payload_builder.py @@ -1,9 +1,12 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +import copy import logging import uuid -from typing import Dict, List, Union +from collections import Counter +from dataclasses import dataclass, field +from typing import Any, Dict, List, Union from laboneq.data.compilation_job import ( CompilationJob, @@ -24,6 +27,8 @@ NearTimeOperation, NearTimeOperationType, NearTimeProgram, + NtStepKey, + RealTimeExecutionInit, Recipe, ServerType, TargetDevice, @@ -61,6 +66,14 @@ _logger = logging.getLogger(__name__) +@dataclass +class GlobalSetupProperties: + global_leader: str = None + is_desktop_setup: bool = False + internal_followers: List[str] = field(default_factory=list) + clock_settings: Dict[str, Any] = field(default_factory=dict) + + class PayloadBuilder(PayloadBuilderAPI): def __init__(self, compilation_service: CompilationServiceAPI = None): self._compilation_service: CompilationServiceAPI = compilation_service @@ -110,6 +123,10 @@ def build_payload( Compose an experiment from a setup descriptor and an experiment descriptor. 
""" + experiment = copy.deepcopy(experiment) + if experiment.signals is None: + experiment.signals = [] + experiment_info = self.extract_experiment_info( experiment, device_setup, signal_mappings ) @@ -126,23 +143,66 @@ def build_payload( target_recipe = Recipe() + global_setup_properties = self._analyze_setup(device_setup, experiment_info) + + config_dict = self._calc_config(global_setup_properties) + + for k, v in self._analyze_dio(device_setup, global_setup_properties).items(): + if k not in config_dict: + config_dict[k] = {} + config_dict[k]["triggering_mode"] = v + def build_config(init): - config = init["config"] + device_uid = init["device_uid"] + if device_uid in config_dict: + config = config_dict[device_uid] + return InitializationConfiguration( + reference_clock=10e6, # FIXME: hardcoded + triggering_mode=config.get("triggering_mode"), + ) + return InitializationConfiguration( reference_clock=10e6, # FIXME: hardcoded - reference_clock_source=config.get("reference_clock_source"), - dio_mode=config.get("dio_mode"), ) + def _find_initialization(recipe, instrument_uid): + for init in recipe["experiment"]["initializations"]: + if init["device_uid"] == instrument_uid: + return init + return None + + for srv in device_setup.servers.values(): + if srv.leader_uid is not None: + init = _find_initialization(compiled_experiment.recipe, srv.leader_uid) + if init is not None: + init["config"]["repetitions"] = 1 + init["config"]["holdoff"] = 0 + + # adapt initializations to consider setup internal connections + _logger.info( f"initializations: {compiled_experiment.recipe['experiment']['initializations']}" ) target_recipe.initializations = [ - Initialization(device=device_dict[i["device_uid"]], config=build_config(i)) + Initialization( + device=device_dict[i["device_uid"]], + config=build_config(i), + ) for i in compiled_experiment.recipe["experiment"]["initializations"] ] _logger.info(f"Built initializations: {target_recipe.initializations}") + target_recipe.realtime_execution_init = [ + RealTimeExecutionInit( + device=next(d for d in target_setup.devices if d.uid == i["device_id"]), + awg_id=i["awg_id"], + seqc=i["seqc_ref"], # todo: create SourceCode object + wave_indices_ref=i["wave_indices_ref"], + nt_step=NtStepKey(**i["nt_step"]), + ) + for i in compiled_experiment.recipe["experiment"]["realtime_execution_init"] + ] + ntp = NearTimeProgramFactory().make(experiment) _logger.info(f"Built NearTimeProgram: {ntp}") @@ -152,10 +212,162 @@ def build_config(init): compiled_experiment_hash=compiled_experiment.uid, recipe=target_recipe, near_time_program=ntp, - src=compiled_experiment.src, + src=compiled_experiment.src, # todo: create SourceCode object ) return run_job + def _calc_config( + self, global_setup_properties: GlobalSetupProperties + ) -> Dict[str, Any]: + retval = {} + if global_setup_properties.global_leader is not None: + retval[global_setup_properties.global_leader.uid] = { + "config": { + "repetitions": 1, + "holdoff": 0, + } + } + if global_setup_properties.is_desktop_setup: + retval[global_setup_properties.global_leader.uid]["config"][ + "triggering_mode" + ] = "desktop_leader" + + if global_setup_properties.is_desktop_setup: + # Internal followers are followers on the same device as the leader. This + # is necessary for the standalone SHFQC, where the SHFSG part does neither + # appear in the PQSC device connections nor the DIO connections. 
+ for f in global_setup_properties.internal_followers: + if f.uid not in retval: + retval[f.uid] = {"config": {}} + retval[f.uid]["config"]["triggering_mode"] = "dio_follower" + + return retval + + def _analyze_dio( + self, device_setup: Setup, global_setup_properties: GlobalSetupProperties + ): + retval = {} + for sic in device_setup.setup_internal_connections: + if sic.from_port.path.startswith("DIOS"): + if global_setup_properties.is_desktop_setup: + retval[sic.to_instrument.uid] = "desktop_dio_follower" + else: + retval[sic.to_instrument.uid] = "dio_follower" + + if sic.from_port.path.startswith("ZSYNCS"): + retval[sic.from_instrument.uid] = "zsync_follower" + + return retval + + def _analyze_setup( + self, device_setup: Setup, experiment_info: ExperimentInfo + ) -> GlobalSetupProperties: + retval = GlobalSetupProperties() + + def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: + return next((instr for instr in device_infos if instr.device_type == type)) + + device_info_dict: Dict[str, DeviceInfo] = {} + for signal in experiment_info.signals: + device_info_dict[signal.device.uid] = signal.device + + device_type_list = [i.device_type for i in device_info_dict.values()] + type_counter = Counter(device_type_list) + has_pqsc = type_counter[DeviceInfoType.PQSC] > 0 + has_hdawg = type_counter[DeviceInfoType.HDAWG] > 0 + has_shfsg = type_counter[DeviceInfoType.SHFSG] > 0 + has_shfqa = type_counter[DeviceInfoType.SHFQA] > 0 + shf_types = {DeviceInfoType.SHFQA, DeviceInfoType.SHFQC, DeviceInfoType.SHFSG} + has_shf = bool(shf_types.intersection(set(device_type_list))) + + # Basic validity checks + signal_infos = experiment_info.signals + + used_devices = set(info.device.device_type for info in signal_infos) + + def get_instrument_by_uid(uid) -> Instrument: + return next((i for i in device_setup.instruments if i.uid == uid), None) + + used_device_serials = set( + get_instrument_by_uid(info.device.uid).address for info in signal_infos + ) + if ( + DeviceInfoType.HDAWG in used_devices + and DeviceInfoType.UHFQA in used_devices + and bool(shf_types.intersection(used_devices)) + ): + raise RuntimeError( + "Setups with signals on each of HDAWG, UHFQA and SHF type " + + "instruments are not supported" + ) + + retval.is_desktop_setup = not has_pqsc and ( + used_devices == {DeviceInfoType.HDAWG} + or used_devices == {DeviceInfoType.SHFSG} + or used_devices == {DeviceInfoType.SHFQA} + or used_devices == {DeviceInfoType.SHFQA, DeviceInfoType.SHFSG} + and len(used_device_serials) == 1 # SHFQC + or used_devices == {DeviceInfoType.HDAWG, DeviceInfoType.UHFQA} + or ( + used_devices == {DeviceInfoType.UHFQA} and has_hdawg + ) # No signal on leader + ) + if ( + not has_pqsc + and not retval.is_desktop_setup + and used_devices != {DeviceInfoType.UHFQA} + and bool(used_devices) # Allow empty experiment (used in tests) + ): + raise RuntimeError( + f"Unsupported device combination {used_devices} for small setup" + ) + + leader = experiment_info.global_leader_device + device_infos = list(device_info_dict.values()) + if retval.is_desktop_setup: + if leader is None: + if has_hdawg: + leader = get_first_instr_of(device_infos, DeviceInfoType.HDAWG) + elif has_shfqa: + leader = get_first_instr_of(device_infos, DeviceInfoType.SHFQA) + if has_shfsg: # SHFQC + retval.internal_followers = [ + get_first_instr_of(device_infos, DeviceInfoType.SHFSG) + ] + elif has_shfsg: + leader = get_first_instr_of(device_infos, DeviceInfoType.SHFSG) + + _logger.debug("Using desktop setup configuration with leader %s", 
leader) + + if has_hdawg or has_shfsg and not has_shfqa: + _logger.warning( + "Not analyzing if awg 0 of leader is used. Triggering may fail." + ) + # TODO: Check if awg 0 of leader is used, and add dummy signal if not + + has_qa = type_counter[DeviceInfoType.SHFQA] > 0 or type_counter[DeviceInfoType.UHFQA] > 0 + is_hdawg_solo = ( + type_counter[DeviceInfoType.HDAWG] == 1 and not has_shf and not has_qa + ) + if is_hdawg_solo: + first_hdawg = get_first_instr_of(device_infos, DeviceInfoType.HDAWG) + if first_hdawg.reference_clock_source is None: + retval.clock_settings[first_hdawg.uid] = "internal" + else: + if not has_hdawg and has_shfsg: # SHFSG or SHFQC solo + first_shfsg = get_first_instr_of(device_infos, DeviceInfoType.SHFSG) + if first_shfsg.reference_clock_source is None: + retval.clock_settings[first_shfsg.uid] = "internal" + if not has_hdawg and has_shfqa: # SHFQA or SHFQC solo + first_shfqa = get_first_instr_of(device_infos, DeviceInfoType.SHFQA) + if first_shfqa.reference_clock_source is None: + retval.clock_settings[first_shfqa.uid] = "internal" + + retval.use_2GHz_for_HDAWG = has_shf + retval.global_leader = leader + + return retval + @classmethod def extract_experiment_info( cls, @@ -359,10 +571,10 @@ def _handle_children( self._append_statement( NearTimeOperation( operation_type=NearTimeOperationType.FOR_LOOP, + children=[loop_body], args={ "count": child.count, - "body": loop_body, - "loop_type": LoopType.AVERAGE, + "loop_type": LoopType.SWEEP, }, ) ) @@ -373,9 +585,9 @@ def _handle_children( self._append_statement( NearTimeOperation( operation_type=NearTimeOperationType.ACQUIRE_LOOP_RT, + children=[loop_body], args={ "count": child.count, - "body": loop_body, "uid": child.uid, "averaging_mode": str(child.averaging_mode), "acquisition_type": str(child.acquisition_type), @@ -394,9 +606,9 @@ def _handle_children( self._append_statement( NearTimeOperation( operation_type=NearTimeOperationType.FOR_LOOP, + children=[loop_body], args={ "count": count, - "body": loop_body, "loop_type": loop_type, }, ) diff --git a/laboneq/implementation/runner/__init__.py b/laboneq/implementation/runner/__init__.py new file mode 100644 index 0000000..b1f929b --- /dev/null +++ b/laboneq/implementation/runner/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from .runner import Runner diff --git a/laboneq/implementation/runner/runner.py b/laboneq/implementation/runner/runner.py index 6abdf9c..b09f9c6 100644 --- a/laboneq/implementation/runner/runner.py +++ b/laboneq/implementation/runner/runner.py @@ -1,65 +1,83 @@ -# Copyright 2020 Zurich Instruments AG +# Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 import logging +import time +from random import random +from threading import Timer from laboneq.data.execution_payload import ExecutionPayload, TargetSetup from laboneq.data.experiment_results import ExperimentResults from laboneq.interfaces.runner.runner_api import RunnerAPI -from laboneq.interfaces.runner.runner_control_api import RunnerControlAPI _logger = logging.getLogger(__name__) -class Runner(RunnerAPI, RunnerControlAPI): +class Runner(RunnerAPI): """ This is the core implementation of the experiment runner. """ - # Currently, just a dummy implementation - def __init__(self): - pass + self._job_queue = [] + self._job_results = {} def submit_execution_payload(self, job: ExecutionPayload): """ Submit an experiment run job.
""" + job_id = len(self._job_queue) + queue_entry = {"job_id": job_id, "job": job} - return None + def complete_job(): + acquired_results = {k: random() for k in job.recipe.measurement_map.keys()} + results = ExperimentResults(acquired_results=acquired_results) + self._job_results[job_id] = results + + Timer(1, complete_job).start() + self._job_queue.append(queue_entry) + return job_id def run_job_status(self, job_id: str): """ Get the status of an experiment run job. """ - return None + return next(j for j in self._job_queue if j["job_id"] == job_id) def run_job_result(self, job_id: str) -> ExperimentResults: """ Get the result of an experiment run job. Blocks until the result is available. """ - return None + num_tries = 10 + while True: + result = self._job_results.get(job_id) + if result: + return result + if num_tries == 0: + break + num_tries -= 1 + time.sleep(100e-3) def connect(self, setup: TargetSetup, do_emulation: bool = True): """ Connect to the setup """ - return None + pass def start(self): """ Start the experiment runner. It will start processing jobs from the job queue. """ - return None + pass def stop(self): """ Stop the experiment runner. It will stop processing jobs from the job queue. """ - return None + pass def disconnect(self): """ Disconnect from the setup. """ - return None + pass diff --git a/laboneq/implementation/runner/runner_legacy.py b/laboneq/implementation/runner/runner_legacy.py new file mode 100644 index 0000000..30161f7 --- /dev/null +++ b/laboneq/implementation/runner/runner_legacy.py @@ -0,0 +1,411 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +import logging +import time + +from box import Box + +from laboneq import controller as ctrl +from laboneq.data.execution_payload import ( + ExecutionPayload, + LoopType, + NearTimeOperation, + NearTimeOperationType, + NearTimeProgram, + TargetSetup, +) +from laboneq.data.experiment_results import ExperimentResults +from laboneq.executor import executor +from laboneq.interfaces.runner.runner_api import RunnerAPI +from laboneq.interfaces.runner.runner_control_api import RunnerControlAPI + +_logger = logging.getLogger(__name__) + + +class RunnerLegacy(RunnerAPI, RunnerControlAPI): + """ + This the core implementation of the experiment runner. + """ + + def __init__(self): + self._job_queue = [] + self._job_results = {} + self._connected = False + self._controller = None + + def connect(self, setup: TargetSetup, do_emulation: bool = True): + _logger.debug(f"Connecting to TargetSetup {setup.uid}") + emulated = True + run_parameters = ctrl.ControllerRunParameters() + run_parameters.dry_run = emulated + run_parameters.ignore_lab_one_version_error = False + + device_setup = convert_to_device_setup(setup) + + controller = ctrl.Controller( + run_parameters=run_parameters, + device_setup=device_setup, + user_functions={}, + ) + controller.connect() + self._controller = controller + self._connected = True + + def submit_execution_payload(self, job: ExecutionPayload): + """ + Submit an experiment run job. 
+ """ + job_id = len(self._job_queue) + queue_entry = {"job_id": job_id, "job": job} + + self._job_queue.append(queue_entry) + if not self._connected: + self.connect(job.target_setup) + + compiled_experiment = convert_to_compiled_experiment(job) + + self._controller.execute_compiled(compiled_experiment) + controller_results = self._controller._results + self._job_results[job_id] = ExperimentResults( + acquired_results=controller_results.acquired_results, + user_func_results=controller_results.user_func_results, + execution_errors=controller_results.execution_errors, + ) + + return job_id + + def run_job_status(self, job_id: str): + """ + Get the status of an experiment run job. + """ + return next(j for j in self._job_queue if j["job_id"] == job_id) + + def run_job_result(self, job_id: str) -> ExperimentResults: + """ + Get the result of an experiment run job. Blocks until the result is available. + """ + num_tries = 10 + while True: + result = self._job_results.get(job_id) + if result: + return result + if num_tries == 0: + break + num_tries -= 1 + time.sleep(100e-3) + + def start(self): + """ + Start the experiment runner. It will start processing jobs from the job queue. + """ + pass + + def stop(self): + """ + Stop the experiment runner. It will stop processing jobs from the job queue. + """ + pass + + def disconnect(self): + """ + Disconnect from the setup. + """ + pass + + +def convert_to_device_setup(setup: TargetSetup): + _logger.debug(f"Converting setup to device setup: {setup}") + retval = {} + + retval["servers"] = { + s.uid: { + "uid": s.uid, + "host": s.address, + "port": s.port, + "api_level": s.api_level, + } + for s in setup.servers + } + + def driver_calculator(device_type): + def retval(): + return device_type.name + + return retval + + def options_calculator(target_device): + def retval(): + return { + "serial": target_device.device_serial, + "interface": target_device.interface, + } + + return retval + + retval["instruments"] = [ + Box( + { + "uid": i.uid, + "address": i.device_serial, + "server_uid": i.server.uid, + "connections": [], + "device_type": i.device_type, + "reference_clock_source": None, + "calc_driver": driver_calculator(i.device_type), + "calc_options": options_calculator(i), + } + ) + for i in setup.devices + ] + + def instrument_by_uid(uid): + return next(i for i in retval["instruments"] if i["uid"] == uid) + + retval["instrument_by_uid"] = instrument_by_uid + + return Box(retval) + + +def convert_to_compiled_experiment(job: ExecutionPayload): + _logger.debug(f"Converting job to compiled experiment: {job}") + retval = {} + + def convert_config(config): + return {k: getattr(config, k) for k in ["reference_clock", "triggering_mode"]} + + retval["recipe"] = { + "$schema": "../../interface/qccs/interface/schemas/recipe-schema-1_4_0.json", + "line_endings": "unix", + "header": { + "version": "1.4.0", + "unit": {"time": "s", "frequency": "Hz", "phase": "rad"}, + "epsilon": {"time": 1e-12}, + }, + } + + retval["recipe"]["experiment"] = { + "initializations": [ + {"device_uid": i.device.uid, "config": convert_config(i.config)} + for i in job.recipe.initializations + ], + "realtime_execution_init": [ + { + "device_id": i.device.uid, + "awg_id": i.awg_id, + "seqc_ref": i.seqc, + "wave_indices_ref": i.wave_indices_ref, + "nt_step": {"indices": i.nt_step.indices}, + } + for i in job.recipe.realtime_execution_init + ], + } + + import json + + from laboneq.data.execution_payload.execution_payload_helper import ( + ExecutionPayloadHelper, + ) + + _logger.debug( + 
f"Near time program:\n{json.dumps(ExecutionPayloadHelper.dump_near_time_program(job.near_time_program), indent=2)}" + ) + + retval["execution"] = convert(job.near_time_program) + + for init in retval["recipe"]["experiment"]["initializations"]: + if ( + "triggering_mode" not in init["config"] + or init["config"]["triggering_mode"] is None + ): + # TODO: the converter currently does not properly add the dio mode + # whereas the device_setup_generator does. As the converter will be + # removed in the future, I ignore this for now. + _logger.warning( + f"Missing triggering_mode in config for {init['device_uid']} - patching" + ) + init["config"]["triggering_mode"] = "dio_follower" + + _logger.debug(f"Converted job to compiled experiment: {retval}") + return Box(retval) + + +class ExecutionFactoryFromNearTimeProgram(executor.ExecutionFactory): + def make(self, near_time_program: NearTimeProgram) -> executor.Statement: + self._handle_children(near_time_program.children, near_time_program.uid) + return self._root_sequence + + @staticmethod + def is_operation(op: NearTimeOperationType): + return op not in ( + NearTimeOperationType.ACQUIRE_LOOP_NT, + NearTimeOperationType.ACQUIRE_LOOP_RT, + NearTimeOperationType.FOR_LOOP, + ) + + @staticmethod + def convert_loop_type(loop_type: LoopType): + return { + LoopType.AVERAGE: executor.LoopType.AVERAGE, + LoopType.SWEEP: executor.LoopType.SWEEP, + LoopType.HARDWARE: executor.LoopType.HARDWARE, + }[loop_type] + + def _handle_children(self, children, parent_uid: str): + for child in children: + if child.operation_type is None: + body = self._sub_scope(self._handle_children, child.children, child.uid) + sequence = executor.Sequence() + sequence.append_statement(body) + if self.is_operation(child.operation_type): + self._append_statement( + self._statement_from_operation(child, parent_uid) + ) + elif child.operation_type == NearTimeOperationType.FOR_LOOP: + loop_body = self._sub_scope( + self._handle_children, child.children, child.uid + ) + self._append_statement( + executor.ForLoop( + child.args["count"], + loop_body, + self.convert_loop_type(child.args["loop_type"]), + ) + ) + elif child.operation_type == NearTimeOperationType.ACQUIRE_LOOP_NT: + loop_body = self._sub_scope( + self._handle_children, child.children, child.uid + ) + self._append_statement( + executor.ExecRT( + count=child.count, + body=loop_body, + uid=child.uid, + averaging_mode=child.averaging_mode, + acquisition_type=child.acquisition_type, + ) + ) + else: + sub_sequence = self._sub_scope( + self._handle_children, child.children, child.uid + ) + self._append_statement(sub_sequence) + + def _handle_sweep(self, sweep: NearTimeOperation): + for parameter in sweep.args["parameters"]: + self._append_statement(self._statement_from_param(parameter)) + self._handle_children(sweep.children, sweep.uid) + + def _statement_from_operation(self, operation, parent_uid: str): + if operation.operation_type == NearTimeOperationType.CALL: + return executor.ExecUserCall( + operation.args["func_name"], operation.args["args"] + ) + if operation.operation_type == NearTimeOperationType.SET: + return executor.ExecSet(operation.args["path"], operation.args["value"]) + if operation.operation_type == NearTimeOperationType.PLAY_PULSE: + return executor.Nop() + if operation.operation_type == NearTimeOperationType.DELAY: + return executor.Nop() + if operation.operation_type == NearTimeOperationType.RESERVE: + return executor.Nop() + if operation.operation_type == NearTimeOperationType.ACQUIRE: + return 
executor.ExecAcquire(operation.handle, operation.signal, parent_uid) + + return executor.Nop() + + +from laboneq.data.execution_payload import ( + ExecutionPayload, + LoopType, + NearTimeOperation, + NearTimeOperationType, + NearTimeProgram, + TargetSetup, +) +from laboneq.data.execution_payload.execution_payload_helper import ( + ExecutionPayloadHelper, +) +from laboneq.executor import executor + + +def convert_loop_type(loop_type: LoopType): + return { + LoopType.AVERAGE: executor.LoopType.AVERAGE, + LoopType.SWEEP: executor.LoopType.SWEEP, + LoopType.HARDWARE: executor.LoopType.HARDWARE, + }[loop_type] + + +def convert(near_time_program: NearTimeProgram): + root_marker = "____ROOT___" + context = {"nodes_by_parent": {}} + + def execution_builder_visitor(operation, context, parent): + if parent is not None: + parent_hash = id(parent) + else: + parent_hash = root_marker + current_node_hash = id(operation) + if parent_hash not in context["nodes_by_parent"]: + context["nodes_by_parent"][parent_hash] = [] + + _logger.debug( + f"Visiting {operation}, context: {context}, current node hash: {current_node_hash}, parent hash: {parent_hash}" + ) + if isinstance(operation, NearTimeProgram) or operation.operation_type is None: + sequence = executor.Sequence() + num_children = 0 + if current_node_hash in context["nodes_by_parent"]: + num_children = len(context["nodes_by_parent"][current_node_hash]) + for child in context["nodes_by_parent"][current_node_hash]: + sequence.append_statement(child) + + _logger.debug(f"Appended {num_children} statements to sequence") + context["nodes_by_parent"][parent_hash].append(sequence) + + elif operation.operation_type == NearTimeOperationType.PLAY_PULSE: + context["nodes_by_parent"][parent_hash].append(executor.Nop()) + elif operation.operation_type == NearTimeOperationType.SET: + context["nodes_by_parent"][parent_hash].append( + executor.ExecSet(operation.args["path"], operation.args["value"]) + ) + + elif operation.operation_type == NearTimeOperationType.SET_SOFTWARE_PARM: + param_name = operation.args["parameter_uid"] + values = operation.args["values"] + axis_name = operation.args["axis_name"] + context["nodes_by_parent"][parent_hash].append( + executor.SetSoftwareParam(param_name, values, axis_name) + ) + + elif operation.operation_type == NearTimeOperationType.FOR_LOOP: + loop_body = executor.Sequence() + if current_node_hash in context["nodes_by_parent"]: + for child in context["nodes_by_parent"][current_node_hash]: + loop_body.append_statement(child) + loop = executor.ForLoop( + operation.args["count"], + loop_body, + convert_loop_type(operation.args["loop_type"]), + ) + context["nodes_by_parent"][parent_hash].append(loop) + elif operation.operation_type == NearTimeOperationType.ACQUIRE_LOOP_RT: + loop_body = executor.Sequence() + if current_node_hash in context["nodes_by_parent"]: + for child in context["nodes_by_parent"][current_node_hash]: + loop_body.append_statement(child) + loop = executor.ExecRT( + operation.args["count"], + loop_body, + operation.uid, + averaging_mode=operation.args["averaging_mode"], + acquisition_type=operation.args["acquisition_type"], + ) + context["nodes_by_parent"][parent_hash].append(loop) + + ExecutionPayloadHelper.accept_near_time_program_visitor( + near_time_program, execution_builder_visitor, context + ) + _logger.debug(f"Context: {context}") + return context["nodes_by_parent"][root_marker][0] diff --git a/laboneq/interfaces/__init__.py b/laboneq/interfaces/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null
+++ b/laboneq/interfaces/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/interfaces/application_management/__init__.py b/laboneq/interfaces/application_management/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/interfaces/application_management/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/interfaces/compilation_service/__init__.py b/laboneq/interfaces/compilation_service/__init__.py new file mode 100644 index 0000000..a5b5a9c --- /dev/null +++ b/laboneq/interfaces/compilation_service/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from .compilation_service_api import CompilationServiceAPI diff --git a/laboneq/interfaces/data_storage/__init__.py b/laboneq/interfaces/data_storage/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/interfaces/data_storage/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/interfaces/data_storage/data_storage_api.py b/laboneq/interfaces/data_storage/data_storage_api.py new file mode 100644 index 0000000..fe31af0 --- /dev/null +++ b/laboneq/interfaces/data_storage/data_storage_api.py @@ -0,0 +1,64 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from abc import ABC +from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Union + + +class DataStorageAPI(ABC): + """ + The interface for a data storage service. This service is used to store and retrieve experiment, setup and result data from a database. + """ + + def get( + self, key: str, with_metadata=False + ) -> Union[Any, Tuple[Any, Dict[str, Any]]]: + pass + + def get_metadata(self, key: str) -> Dict[str, Any]: + pass + + def keys(self) -> Iterable[str]: + """Return an iterable of all keys in the database.""" + raise NotImplementedError + + def store( + self, data: Any, key: str = None, metadata: Optional[Dict[str, Any]] = None + ) -> None: + """ + Store data in the database. Only data that can be serialized with the L1Q serializer can be stored. + + Args: + data (any): The data to store. + key (str): The key to store the data under. + metadata (dict): Metadata to store with the data. Metadata can be used to search for data in the database. + Metadata must have strings as keys, and values may be strings or Python standard datetime objects. + """ + raise NotImplementedError + + def delete(self, key: str) -> None: + """ + Delete data from the database. + + Args: + key (str): The key of the data to delete. + """ + raise NotImplementedError + + def find( + self, + metadata: Optional[Dict[str, Any]] = None, + condition: Optional[Callable[[Dict[str, Any]], bool]] = None, + ) -> Iterable[str]: + """ + Find data in the database. + + Args: + metadata (dict): Metadata to search for. If not None, only data where all keys and values match the + metadata will be returned. + If None, all data which also matches the condition will be returned. + + condition (function): A function that takes a single argument (the metadata of a data entry) and returns True if the data entry should be returned. If None, + all data matching the metadata will be returned.
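+
+        Example (illustrative only; ``db`` stands for any concrete
+        ``DataStorageAPI`` implementation)::
+
+            keys = db.find(
+                metadata={"experiment": "rabi"},
+                condition=lambda md: md.get("qubit") == "q0",
+            )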
+ """ + raise NotImplementedError diff --git a/laboneq/interfaces/experiment/__init__.py b/laboneq/interfaces/experiment/__init__.py new file mode 100644 index 0000000..98e1998 --- /dev/null +++ b/laboneq/interfaces/experiment/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from .experiment_api import ExperimentAPI diff --git a/laboneq/interfaces/payload_builder/__init__.py b/laboneq/interfaces/payload_builder/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/interfaces/payload_builder/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/interfaces/runner/__init__.py b/laboneq/interfaces/runner/__init__.py new file mode 100644 index 0000000..17c557a --- /dev/null +++ b/laboneq/interfaces/runner/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/openqasm3/openqasm3_importer.py b/laboneq/openqasm3/openqasm3_importer.py index 9fd36f3..60f0f3f 100644 --- a/laboneq/openqasm3/openqasm3_importer.py +++ b/laboneq/openqasm3/openqasm3_importer.py @@ -149,14 +149,14 @@ def transpile(self, parent: Union[ast.Program, ast.Box], uid_hint="") -> Section subsect = self._handle_box(child) elif isinstance(child, ast.QuantumBarrier): subsect = self._handle_barrier(child) - elif isinstance(child, ast.QuantumReset): - subsect = self._handle_quantum_reset(child) elif isinstance(child, ast.DelayInstruction): subsect = self._handle_delay_instruction(child) elif isinstance(child, ast.ClassicalAssignment): self._handle_assignment(child) elif isinstance(child, ast.QuantumMeasurementStatement): subsect = self._handle_measurement(child) + elif isinstance(child, ast.QuantumReset): + subsect = self._handle_quantum_reset(child) else: msg = f"Statement type {type(child)} not supported" raise OpenQasmException(msg, mark=child.span) @@ -301,17 +301,6 @@ def _handle_include(self, statement: ast.Include) -> None: msg = f"Only 'stdgates.inc' is supported for include, found '{statement.filename}'." raise OpenQasmException(msg, mark=statement.span) - def _handle_quantum_reset(self, statement: ast.QuantumReset): - # Although ``qubits`` is plural, only a single qubit is allowed. - qubit_name = eval_expression( - statement.qubits, namespace=self.scope - ).canonical_name - try: - return self.gate_store.lookup_gate("reset", (qubit_name,)) - except KeyError as e: - msg = f"Reset gate for qubit '{qubit_name}' not found." - raise OpenQasmException(msg, mark=statement.span) from e - def _handle_delay_instruction(self, statement: ast.DelayInstruction): qubits = statement.qubits duration = eval_expression(statement.duration, namespace=self.scope, type=float) @@ -394,3 +383,14 @@ def _handle_measurement(self, statement: ast.QuantumMeasurementStatement): # Set the bit to a special value to disallow compile time arithmetic b.value = MeasurementResult() return s + + def _handle_quantum_reset(self, statement: ast.QuantumReset): + # Although ``qubits`` is plural, only a single qubit is allowed. + qubit_name = eval_expression( + statement.qubits, namespace=self.scope + ).canonical_name + try: + return self.gate_store.lookup_gate("reset", (qubit_name,)) + except KeyError as e: + msg = f"Reset gate for qubit '{qubit_name}' not found." 
+ raise OpenQasmException(msg, mark=statement.span) from e diff --git a/laboneq/simple.py b/laboneq/simple.py index 7f5ee87..133771b 100644 --- a/laboneq/simple.py +++ b/laboneq/simple.py @@ -25,8 +25,6 @@ ) from laboneq.dsl.device import DeviceSetup from laboneq.dsl.device.device_setup_helper import DeviceSetupHelper -from laboneq.dsl.device.quantum_operations import QuantumOperation -from laboneq.dsl.device.qubits import QuantumElement, Qubit from laboneq.dsl.enums import ( AcquisitionType, AveragingMode, @@ -49,8 +47,10 @@ Sweep, pulse_library, ) +from laboneq.dsl.quantum import QuantumOperation, Qubit, QubitParameters from laboneq.dsl.result import Results from laboneq.dsl.session import Session from laboneq.dsl.utils import has_onboard_lo +from laboneq.implementation.data_storage.l1q_database_wrapper import L1QDatabase from laboneq.pulse_sheet_viewer.pulse_sheet_viewer import show_pulse_sheet from laboneq.simulator.output_simulator import OutputSimulator diff --git a/laboneq/simulator/output_simulator.py b/laboneq/simulator/output_simulator.py index 35566e8..3798088 100644 --- a/laboneq/simulator/output_simulator.py +++ b/laboneq/simulator/output_simulator.py @@ -20,6 +20,7 @@ class OutputData: time: ArrayLike wave: ArrayLike = None trigger: ArrayLike = None + marker: ArrayLike = None frequency: ArrayLike = None @@ -145,6 +146,7 @@ def get_snippet( output_length: float, get_wave: bool = True, get_trigger: bool = False, + get_marker: bool = False, get_frequency: bool = False, ) -> OutputData: channel = ( @@ -162,6 +164,8 @@ def get_snippet( sim_targets |= SimTarget.ACQUIRE if get_trigger and awg_id.is_out: sim_targets |= SimTarget.TRIGGER + if get_marker and awg_id.is_out: + sim_targets |= SimTarget.MARKER if get_frequency and awg_id.is_out: sim_targets |= SimTarget.FREQUENCY ws = WaveScroller( @@ -173,6 +177,7 @@ def get_snippet( return OutputData( time=ws.time_axis, wave=ws.wave_snippet if awg_id.is_out else ws.acquire_snippet, + marker=ws.marker_snippet, trigger=ws.trigger_snippet, frequency=ws.frequency_snippet, ) diff --git a/laboneq/simulator/seqc_parser.py b/laboneq/simulator/seqc_parser.py index 3892474..60899c0 100644 --- a/laboneq/simulator/seqc_parser.py +++ b/laboneq/simulator/seqc_parser.py @@ -11,7 +11,7 @@ # Note: The simulator may be used as a testing tool, so it must be independent of the production code # Do not add dependencies on the code being tested here (such as the compiler, the DSL, and so forth)
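To illustrate the new `get_marker` option added to `get_snippet` above, a minimal sketch (not part of the diff; `compiled_exp` and `physical_channel` are placeholders for a compiled experiment and a device output channel, and the exact positional arguments are an assumption):

    from laboneq.simulator.output_simulator import OutputSimulator

    simulator = OutputSimulator(compiled_exp)
    data = simulator.get_snippet(
        physical_channel,
        start=0.0,          # assumed name for the start of the time window
        output_length=1e-6,
        get_marker=True,
    )
    # data.marker holds the simulated marker waveform; per the wave scroller,
    # marker 1 is stored in the real part and marker 2 in the imaginary part.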
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any import numpy as np from numpy import typing as npt @@ -294,9 +294,9 @@ class SeqCDescriptor: sampling_rate: float output_port_delay: float source: str = None - channels: List[int] = None - wave_index: Dict[Any, Any] = None - command_table: List[Any] = None + channels: list[int] = None + wave_index: dict[Any, Any] = None + command_table: list[Any] = None class Operation(Enum): @@ -315,24 +315,24 @@ class SeqCEvent: start_samples: int length_samples: int operation: Operation - args: List[Any] + args: list[Any] @dataclass class WaveRefInfo: assigned_index: int = -1 - wave_data_idx: List[int] = field(default_factory=list) + wave_data_idx: list[int] = field(default_factory=list) length_samples: int = None @dataclass class CommandTableEntryInfo: - abs_phase: Optional[float] = None - rel_phase: Optional[float] = None - abs_amplitude: Optional[float] = None + abs_phase: float | None = None + rel_phase: float | None = None + abs_amplitude: float | None = None @classmethod - def from_ct_entry(cls, ct_entry: Dict): + def from_ct_entry(cls, ct_entry: dict): d = {} if "phase" in ct_entry: # SHFSG @@ -387,9 +387,9 @@ def from_ct_entry(cls, ct_entry: Dict): @dataclass class SeqCSimulation: - events: List[SeqCEvent] = field(default_factory=list) + events: list[SeqCEvent] = field(default_factory=list) device_type: str = "" - waves: List[Any] = field(default_factory=list) + waves: list[Any] = field(default_factory=list) sampling_rate: float = field(default=2.0e9) startup_delay: float = field(default=0.0) output_port_delay: float = field(default=0.0) @@ -400,7 +400,7 @@ def __init__( self, descriptor: SeqCDescriptor, waves, - max_time: Optional[float], + max_time: float | None, ): self.predefined_consts = { "QA_INT_0": 0b1, @@ -470,18 +470,21 @@ def __init__( self.descriptor = descriptor self.waves = waves self.source = preprocess_source(descriptor.source) - self.wave_lookup_by_args: Dict[Any, WaveRefInfo] = {} - self.wave_names_by_index: Dict[int, List[str]] = {} - self.wave_data: List[Any] = [] - self.max_time: Optional[float] = max_time + self.wave_lookup_by_args: dict[Any, WaveRefInfo] = {} + self.wave_names_by_index: dict[int, list[str]] = {} + self.wave_data: list[Any] = [] + self.max_time: float | None = max_time self._oscillator_sweep_config = {} - self._oscillator_sweep_params: Dict[str, Dict[int, float]] = {} + self._oscillator_sweep_params: dict[str, dict[int, float]] = {} + self._command_table_by_index = { + ct["index"]: ct for ct in self.descriptor.command_table + } def _last_played_sample(self) -> int: ev = self.seqc_simulation.events return ev[-1].start_samples + ev[-1].length_samples if len(ev) > 0 else 0 - def _last_play_start_samples(self) -> Tuple[int, int]: + def _last_play_start_samples(self) -> tuple[int, int]: # Returns a time in samples and an index where the next point-in-time event # has to be positioned relative to the last time-span event. 
This is to support # the SeqC rule that point-in-time events (like startQA) be aligned with the start @@ -519,7 +522,7 @@ def resolve(self, name): def _args2key(self, args): return tuple(tuple(a.items()) if isinstance(a, dict) else a for a in args) - def _update_wave_refs(self, wave_names: List[str], known_wave: WaveRefInfo): + def _update_wave_refs(self, wave_names: list[str], known_wave: WaveRefInfo): known_length = known_wave.length_samples # make VSCode's code parser happy if known_length is not None: return @@ -578,19 +581,30 @@ def _update_wave_refs(self, wave_names: list[str], known_wave: WaveRefInfo): def _append_wave_event( self, - wave_names: List[str], + wave_names: list[str], known_wave: WaveRefInfo, - ct_info: Optional[CommandTableEntryInfo], + ct_info: CommandTableEntryInfo | None, ): self._update_wave_refs(wave_names, known_wave) + uses_marker_1 = any( + ["marker1" in wave for wave in wave_names if wave is not None] + ) + uses_marker_2 = any( + ["marker2" in wave for wave in wave_names if wave is not None] + ) + time_samples = self._last_played_sample() self.seqc_simulation.events.append( SeqCEvent( start_samples=time_samples, length_samples=known_wave.length_samples, operation=Operation.PLAY_WAVE, - args=[known_wave.wave_data_idx, ct_info], + args=[ + known_wave.wave_data_idx, + ct_info, + {"marker1": uses_marker_1, "marker2": uses_marker_2}, + ], ) ) @@ -671,9 +685,7 @@ def executeTableEntry(self, ct_index, latency=None): # todo(JL): Find a better index via the command table offset; take last for now ct_index = self.descriptor.command_table[-1]["index"] - ct_entry = next( - iter(i for i in self.descriptor.command_table if i["index"] == ct_index) - ) + ct_entry = self._command_table_by_index[ct_index] if "waveform" not in ct_entry: return # todo: simulator does not yet support playZero via command table @@ -694,6 +706,10 @@ def executeTableEntry(self, ct_index, latency=None): else: assert False, f"Unknown signal type: {wave['type']}" + for candidate_wave in self.waves.keys(): + if wave["wave_name"] in candidate_wave and "marker" in candidate_wave: + wave_names.append(candidate_wave) + ct_info = CommandTableEntryInfo.from_ct_entry(ct_entry) if latency is not None: @@ -915,9 +931,9 @@ def find_device(recipe, device_uid): def analyze_recipe( recipe, sources, wave_indices, command_tables -) -> List[SeqCDescriptor]: - outputs: Dict[str, List[int]] = {} - seqc_descriptors_from_recipe: Dict[str, SeqCDescriptor] = {} +) -> list[SeqCDescriptor]: + outputs: dict[str, list[int]] = {} + seqc_descriptors_from_recipe: dict[str, SeqCDescriptor] = {} for init in recipe["experiment"]["initializations"]: device_uid = init["device_uid"] device = find_device(recipe, device_uid) @@ -931,22 +947,21 @@ def analyze_recipe( sampling_rate = get_frequency(device_type) startup_delay = -80e-9 if device_type == "HDAWG" and "config" in init: - if "dio_mode" in init["config"]: - dio_mode = init["config"]["dio_mode"] - if dio_mode == "hdawg_leader": - if sampling_rate == 2e9: - startup_delay = -24e-9 - else: - startup_delay = -20e-9 + triggering_mode = init["config"].get("triggering_mode") + if triggering_mode == "desktop_leader": + if sampling_rate == 2e9: + startup_delay = -24e-9 + else: + startup_delay = -20e-9 # TODO(2K): input port_delay previously was not taken into account by the simulator # - keeping it as is for not breaking the tests. To be cleaned up. 
- input_channel_delays: Dict[int, float] = { + input_channel_delays: dict[int, float] = { i["channel"]: i["scheduler_port_delay"] # + i.get("port_delay", 0.0) for i in init.get("inputs", []) } - output_channel_delays: Dict[int, float] = { + output_channel_delays: dict[int, float] = { o["channel"]: o["scheduler_port_delay"] + o.get("port_delay", 0.0) for o in init.get("outputs", []) } @@ -958,8 +973,15 @@ def analyze_recipe( awg_index = 0 if "awgs" in init: for awg in init["awgs"]: - seqc = awg["seqc"] awg_nr = awg["awg"] + rt_exec_step = next( + iter( + r + for r in recipe["experiment"]["realtime_execution_init"] + if r["device_id"] == device_uid and r["awg_id"] == awg_nr + ) + ) + seqc = rt_exec_step["seqc_ref"] if device_type == "SHFSG" or device_type == "SHFQA": input_channel = awg_nr output_channels = [awg_nr] @@ -995,7 +1017,7 @@ def analyze_recipe( seqc ].output_port_delay += precompensation_delay - channels: List[int] = [ + channels: list[int] = [ output["channel"] for output in init["outputs"] if output["channel"] in output_channels @@ -1008,9 +1030,7 @@ def analyze_recipe( seq_c_wave_indices = {} for wave_index in wave_indices: - wave_seq_c_filename = ( - wave_index["filename"][: -len("_waveindices.csv")] + ".seqc" - ) + wave_seq_c_filename = wave_index["filename"] if len(wave_index["value"]) > 0: seq_c_wave_indices[wave_seq_c_filename] = {} for wave_name, index_value in wave_index["value"].items(): @@ -1094,7 +1114,7 @@ def replace_repeat(match_obj): def _analyze_compiled( compiled: CompiledExperiment, -) -> Tuple[List[SeqCDescriptor], Dict[str, npt.ArrayLike]]: +) -> tuple[list[SeqCDescriptor], dict[str, npt.ArrayLike]]: if isinstance(compiled, dict): compiled = SimpleNamespace( recipe=compiled["recipe"], @@ -1103,7 +1123,10 @@ def _analyze_compiled( wave_indices=compiled["wave_indices"], ) seqc_descriptors = analyze_recipe( - compiled.recipe, compiled.src, compiled.wave_indices, compiled.command_tables + compiled.recipe, + compiled.src, + compiled.wave_indices, + compiled.command_tables, ) read_wave_bin = lambda w: w if w.ndim == 1 else np.array([[s] for s in w]) @@ -1111,9 +1134,9 @@ def _analyze_compiled( return seqc_descriptors, waves -def simulate(compiled: CompiledExperiment, max_time=None) -> Dict[str, SeqCSimulation]: +def simulate(compiled: CompiledExperiment, max_time=None) -> dict[str, SeqCSimulation]: seqc_descriptors, waves = _analyze_compiled(compiled) - results: Dict[str, SeqCSimulation] = {} + results: dict[str, SeqCSimulation] = {} for descriptor in seqc_descriptors: results[descriptor.name] = run_single_source(descriptor, waves, max_time) return results diff --git a/laboneq/simulator/wave_scroller.py b/laboneq/simulator/wave_scroller.py index 83d6535..55319fe 100644 --- a/laboneq/simulator/wave_scroller.py +++ b/laboneq/simulator/wave_scroller.py @@ -5,7 +5,7 @@ import math from enum import Flag -from typing import List, Optional, Set, Tuple +from typing import List, Optional, Set import numpy as np from numpy.typing import ArrayLike @@ -18,12 +18,123 @@ ) +def _overlaps(a_start: int, a_length: int, b_start: int, b_length: int): + """Return True if the first and second sample intervals overlap. + + Intervals that touch are considered to overlap. + + Args: + a_start: The starting sample of the first interval. + a_length: The number of samples in the first interval. + b_start: The starting sample of the second interval. + b_length: The number of samples in the second interval. + + Returns: + True if the intervals overlap. False otherwise. 
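+
+    For illustration, a few expected values (touching intervals count as
+    overlapping)::
+
+        >>> _overlaps(0, 10, 5, 10)
+        True
+        >>> _overlaps(0, 10, 10, 5)
+        True
+        >>> _overlaps(0, 10, 20, 5)
+        False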
+ """ + a_end = a_start + a_length + b_end = b_start + b_length + return a_start <= b_end and b_start <= a_end + + +def _slice_copy( + a: ArrayLike, + a_start: int, + b: ArrayLike, + b_start: int, + b_len: int, +): + """Copy up to ``b_len`` samples from array ``b`` to array ``a`` without exceeding + the end of ``a`` and aligning the two arrays according to their starting samples. + + Args: + a: An array-like to copy values to. + a_start: The starting sample of the destination array. + b: An array-like to copy values from. + b_start: The starting sample of the source array. + b_len: The maximum number of samples to copy from the source array. + + Returns: + Nothing is returned. The destination array is modified inplace. + """ + a_end = a_start + len(a) + b_end = b_start + b_len + if not (a_start < b_end and b_start < a_end): + # handle the case of intervals that don't overlap or only touch + return + + left = max(a_start, b_start) + right = min(a_end, b_end) + + a[left - a_start : right - a_start] = b[left - b_start : right - b_start] + + +def _slice_add( + a: ArrayLike, + a_start: int, + b: ArrayLike, + b_start: int, + b_len: int, +): + """Add up to ``b_len`` samples from array ``b`` to the corresponding entries from + array ``a`` without exceeding the end of ``a`` and aligning the two arrays according + to their starting samples. + + Args: + a: An array-like to add values to. + a_start: The starting sample of the destination array. + b: An array-like to add values from. + b_start: The starting sample of the source array. + b_len: The maximum number of samples to copy from the source array. + + Returns: + Nothing is returned. The destination array is modified inplace. + """ + a_end = a_start + len(a) + b_end = b_start + b_len + if not (a_start < b_end and b_start < a_end): + # handle the case of intervals that don't overlap or only touch + return + + left = max(a_start, b_start) + right = min(a_end, b_end) + + a[left - a_start : right - a_start] += b[left - b_start : right - b_start] + + +def _slice_set(a: ArrayLike, a_start: int, b, b_start: int, b_len: int): + """Set up to ``b_len`` samples in array ``a`` to value ``b`` without exceeding + the end of ``a`` and aligning the intervals according to their starting samples. + + Args: + a: An array like to copy values to. + a_start: The starting sample of the destination array. + b: The value to set. + b_start: The starting sample of the source event. + b_len: The length of the source event. + + Returns: + Nothing is returned. The destination array is modified inplace. 
+ """ + a_end = a_start + len(a) + b_end = b_start + b_len + if not (a_start < b_end and b_start < a_end): + # handle the case of intervals that don't overlap or only touch + return + + left = max(a_start, b_start) + right = min(a_end, b_end) + + a[left - a_start : right - a_start] = b + + class SimTarget(Flag): NONE = 0 PLAY = 1 ACQUIRE = 2 TRIGGER = 4 FREQUENCY = 8 + MARKER = 16 class WaveScroller: @@ -35,12 +146,14 @@ def __init__( sim: SeqCSimulation, ): self.ch = ch - self.sim_targets = sim_targets self.sim = sim self.is_shfqa = sim.device_type == "SHFQA" + self.sim_targets = sim_targets + self.wave_snippet = None + self.marker_snippet = None self.acquire_snippet = None self.trigger_snippet = None self.frequency_snippet = None @@ -50,11 +163,13 @@ def __init__( self.last_trig = 0 self.last_freq_set_samples = 0 self.last_freq = np.nan + self.last_played_value = 0 self.oscillator_phase: Optional[float] = None self.processors = { Operation.PLAY_WAVE: self._process_play_wave, Operation.PLAY_HOLD: self._process_play_hold, + Operation.PLAY_ZERO: self._process_play_zero, Operation.START_QA: self._process_start_qa, Operation.SET_TRIGGER: self._process_set_trigger, Operation.SET_OSC_FREQ: self._process_set_osc_freq, @@ -63,10 +178,15 @@ def __init__( def is_output(self) -> bool: return any( t in self.sim_targets - for t in [SimTarget.PLAY, SimTarget.TRIGGER, SimTarget.FREQUENCY] + for t in [ + SimTarget.PLAY, + SimTarget.TRIGGER, + SimTarget.FREQUENCY, + SimTarget.MARKER, + ] ) - def prepare(self, snippet_length: int): + def prepare(self, snippet_start_samples: int, snippet_length: int): if SimTarget.PLAY in self.sim_targets: if len(self.ch) > 1 or self.is_shfqa: self.wave_snippet = np.zeros(snippet_length, dtype=np.complex128) @@ -76,31 +196,39 @@ def prepare(self, snippet_length: int): self.acquire_snippet = np.zeros(snippet_length, dtype=np.uint8) if SimTarget.TRIGGER in self.sim_targets: self.trigger_snippet = np.zeros(snippet_length, dtype=np.uint8) + if SimTarget.MARKER in self.sim_targets: + self.marker_snippet = np.zeros(snippet_length, dtype=np.complex128) if SimTarget.FREQUENCY in self.sim_targets: self.frequency_snippet = np.full(snippet_length, np.nan, dtype=np.float64) - def target_events(self) -> Set[Operation]: - target_events: Set[Operation] = set() + def target_ops(self) -> Set[Operation]: + target_ops: Set[Operation] = set() if ( SimTarget.ACQUIRE in self.sim_targets or SimTarget.TRIGGER in self.sim_targets or SimTarget.PLAY in self.sim_targets and self.is_shfqa ): - target_events.add(Operation.START_QA) + target_ops.add(Operation.START_QA) if SimTarget.PLAY in self.sim_targets and not self.is_shfqa: - target_events.add(Operation.PLAY_WAVE) - target_events.add(Operation.PLAY_HOLD) + target_ops.add(Operation.PLAY_WAVE) + target_ops.add(Operation.PLAY_HOLD) + target_ops.add(Operation.PLAY_ZERO) if SimTarget.TRIGGER in self.sim_targets: - target_events.add(Operation.SET_TRIGGER) + target_ops.add(Operation.SET_TRIGGER) if SimTarget.FREQUENCY in self.sim_targets: - target_events.add(Operation.SET_OSC_FREQ) - return target_events + target_ops.add(Operation.SET_OSC_FREQ) + return target_ops def _process_play_wave(self, event: SeqCEvent, snippet_start_samples: int): + channels = event.args[0] ct_info: CommandTableEntryInfo = event.args[1] - if len(self.ch) > 1: + if channels[self.ch[0] % 2] is None: + # A None for the first channel represents a FUNCTIONAL pulse + # which is not yet supported by the simulator + return + if len(self.ch) > 1: ct_abs_phase = ct_info.abs_phase if ct_info 
is not None else None ct_rel_phase = ct_info.rel_phase if ct_info is not None else None if ct_abs_phase is not None: @@ -109,26 +237,71 @@ def _process_play_wave(self, event: SeqCEvent, snippet_start_samples: int): self.oscillator_phase = ( self.oscillator_phase or 0.0 ) + ct_rel_phase / (180 / math.pi) - wave = 1j * self.sim.waves[event.args[0][self.ch[1] % 2]] - wave += self.sim.waves[event.args[0][self.ch[0] % 2]] - # If the command table phase is set, assume that the signal is complex (rather than 2x real) + wave = 1j * self.sim.waves[channels[self.ch[1] % 2]] + wave += self.sim.waves[channels[self.ch[0] % 2]] + + # If the command table phase is set, assume that the signal is complex + # (rather than 2x real) if self.oscillator_phase is not None: wave *= np.exp(-1j * self.oscillator_phase) + else: - wave = self.sim.waves[event.args[0][self.ch[0] % 2]] + wave = self.sim.waves[channels[self.ch[0] % 2]] # Note: CT phase not implemented on RF signals + ct_abs_amplitude = ct_info.abs_amplitude if ct_info is not None else None if ct_abs_amplitude is not None: wave = wave * ct_abs_amplitude - wave_start_samples = event.start_samples - snippet_start_samples - self.wave_snippet[wave_start_samples : wave_start_samples + len(wave)] = wave + + _slice_copy( + self.wave_snippet, + snippet_start_samples, + wave, + event.start_samples, + event.length_samples, + ) + self.last_played_value = wave[event.length_samples - 1] + + markers = event.args[2] if len(event.args) > 2 else {} + + if markers.get("marker1"): + _slice_copy( + self.marker_snippet, + snippet_start_samples, + self.sim.waves[event.args[0][2]], + event.start_samples, + event.length_samples, + ) + + if markers.get("marker2"): + wave_arg_pos = 3 if event.args[2]["marker1"] else 2 + _slice_add( + self.marker_snippet, + snippet_start_samples, + 1j * self.sim.waves[event.args[0][wave_arg_pos]], + event.start_samples, + event.length_samples, + ) def _process_play_hold(self, event: SeqCEvent, snippet_start_samples: int): - wave_start_samples = event.start_samples - snippet_start_samples - self.wave_snippet[ - wave_start_samples : wave_start_samples + event.length_samples - ] = self.wave_snippet[wave_start_samples - 1] + _slice_set( + self.wave_snippet, + snippet_start_samples, + self.last_played_value, + event.start_samples, + event.length_samples, + ) + + def _process_play_zero(self, event: SeqCEvent, snippet_start_samples: int): + _slice_set( + self.wave_snippet, + snippet_start_samples, + 0.0, + event.start_samples, + event.length_samples, + ) + self.last_played_value = 0.0 def _process_start_qa(self, event: SeqCEvent, snippet_start_samples: int): if SimTarget.PLAY in self.sim_targets and self.is_shfqa: @@ -141,15 +314,21 @@ def _process_start_qa(self, event: SeqCEvent, snippet_start_samples: int): def _process_shfqa_gen(self, event: SeqCEvent, snippet_start_samples: int): generator_mask: int = event.args[0] + if self.ch[0] < 0: + # The old_output_simulator sets ChannelInfo("QAResult", -1, SimTarget.ACQUIRE) to + # skip producing the SHFQA acquire play pulse, so we support that here too. 
+ return if (generator_mask & (1 << self.ch[0])) != 0: - wave_start_samples = event.start_samples - snippet_start_samples wave = 1j * self.sim.waves[event.args[4][1]] wave += self.sim.waves[event.args[4][0]] - # TODO(2K): ensure wave doesn't exceed snippet boundary, - # as the wave length is not included in the event length - self.wave_snippet[ - wave_start_samples : wave_start_samples + len(wave) - ] = wave + _slice_copy( + self.wave_snippet, + snippet_start_samples, + wave, + event.start_samples, + event.length_samples, + ) + self.last_played_value = wave[event.length_samples - 1] def _process_acquire(self, event: SeqCEvent, snippet_start_samples: int): if SimTarget.ACQUIRE in self.sim_targets: @@ -162,8 +341,8 @@ def _process_acquire(self, event: SeqCEvent, snippet_start_samples: int): - snippet_start_samples + measurement_delay_samples ) - if wave_start_samples < len(self.acquire_snippet): - self.acquire_snippet[wave_start_samples] = 1.0 + if 0 <= wave_start_samples < len(self.acquire_snippet): + self.acquire_snippet[wave_start_samples] = 1 if SimTarget.TRIGGER in self.sim_targets: trigger_index = 5 if self.is_shfqa else 4 if event.args[trigger_index] is None: @@ -185,27 +364,25 @@ def _process_acquire(self, event: SeqCEvent, snippet_start_samples: int): def _process_set_trigger(self, event: SeqCEvent, snippet_start_samples: int): value: int = int(event.args[0]) wave_start_samples = event.start_samples - snippet_start_samples - if ( - wave_start_samples <= len(self.trigger_snippet) - and self.last_trig is not None - ): + if 0 <= wave_start_samples <= len(self.trigger_snippet): self.trigger_snippet[ self.last_trig_set_samples : wave_start_samples ] = self.last_trig - self.last_trig_set_samples = wave_start_samples + self.last_trig_set_samples = max(0, wave_start_samples) self.last_trig = value def _process_set_osc_freq(self, event: SeqCEvent, snippet_start_samples: int): oscillator: int = event.args[0] - # TODO(2K): Track oscillator switching, currently osc 0 is hard-coded for Hw sweeps + # TODO(2K): Track oscillator switching, currently osc 0 is hard-coded for + # Hw sweeps if oscillator == 0: frequency: float = event.args[1] wave_start_samples = event.start_samples - snippet_start_samples - if wave_start_samples <= len(self.frequency_snippet): + if 0 <= wave_start_samples <= len(self.frequency_snippet): self.frequency_snippet[ self.last_freq_set_samples : wave_start_samples ] = self.last_freq - self.last_freq_set_samples = wave_start_samples + self.last_freq_set_samples = max(0, wave_start_samples) self.last_freq = frequency def process(self, event: SeqCEvent, snippet_start_samples: int): @@ -213,94 +390,75 @@ def process(self, event: SeqCEvent, snippet_start_samples: int): if processor is not None: processor(event, snippet_start_samples) - def trim(self, offset: int, length: int): - if self.wave_snippet is not None: - self.wave_snippet = self.wave_snippet[offset : offset + length] - if self.acquire_snippet is not None: - self.acquire_snippet = self.acquire_snippet[offset : offset + length] + def finalize(self): if self.trigger_snippet is not None: if self.last_trig_set_samples < len(self.trigger_snippet): self.trigger_snippet[self.last_trig_set_samples :] = self.last_trig - self.trigger_snippet = self.trigger_snippet[offset : offset + length] + if self.frequency_snippet is not None: if self.last_freq_set_samples < len(self.frequency_snippet): self.frequency_snippet[self.last_freq_set_samples :] = self.last_freq - self.frequency_snippet = self.frequency_snippet[offset : offset + 
length] def calc_snippet( self, start_secs: float, length_secs: float, - ) -> Tuple[ArrayLike, ArrayLike]: + ): time_delay_secs = self.sim.output_port_delay if self.is_output() else 0.0 time_delay_secs += self.sim.startup_delay - target_events = self.target_events() start_samples = int( np.round((start_secs - time_delay_secs) * self.sim.sampling_rate) ) length_samples = int(np.round(length_secs * self.sim.sampling_rate)) + if start_samples < 0: + # truncate any part of the interval that extends into negative + # sample counts (there are no events with negative samples) + length_samples = max(0, length_samples + start_samples) + start_samples = 0 end_samples = start_samples + length_samples - def overlaps(a_start, a_length, b_start, b_length): - return ( - min(a_start + a_length, b_start + b_length) - max(a_length, b_length) - != 0 - ) - - max_event_idx = next( - ( - i - for i, e in enumerate(self.sim.events) - if e.start_samples > start_samples + length_samples - ), - None, - ) + # filter relevant events into events pre-interval and events that + # overlap the interval, keeping only the last of each kind of + # operation + pre_events = {} + interval_events = [] + target_ops = self.target_ops() + for ev in self.sim.events: + if ev.start_samples > end_samples: + break + if ev.operation not in target_ops: + continue + if _overlaps( + start_samples, + length_samples, + ev.start_samples, + ev.length_samples, + ): + interval_events.append(ev) + else: + pre_events[ev.operation] = ev + pre_events = sorted(pre_events.values(), key=lambda ev: ev.start_samples) - events_in_window = [ - ev - for ev in self.sim.events[:max_event_idx] - if overlaps( - start_samples, length_samples, ev.start_samples, ev.length_samples - ) - ] - if len(events_in_window): - snippet_start_samples = min(ev.start_samples for ev in events_in_window) - snippet_length = ( - max( - # in case the last event had zero length, add one sample so that - # for example a final setTrigger(0) can take effect and set the - # last sample to 0 - ev.start_samples + (ev.length_samples or 1) - for ev in events_in_window - ) - - snippet_start_samples + # truncate sample length to the end of the last contained event + if interval_events: + ev_end = max( + ev.start_samples + (ev.length_samples or 1) for ev in interval_events ) + end_samples = min(ev_end, end_samples) + length_samples = end_samples - start_samples else: - snippet_start_samples = start_samples - snippet_length = length_samples - - op_events = [ev for ev in events_in_window if ev.operation in target_events] - self.prepare(snippet_length) + end_samples = start_samples + length_samples = 0 - for ev in op_events: - self.process(ev, snippet_start_samples) - - # clip to actually available samples, even if wider range requested - end_samples = min(end_samples, snippet_start_samples + snippet_length) - start_samples = max(0, start_samples) - length_samples = end_samples - start_samples - if length_samples <= 0: - return np.array([]), np.array([]) + # prepare and populate the snippets + self.prepare(start_samples, length_samples) + for ev in pre_events: + self.process(ev, start_samples) + for ev in interval_events: + self.process(ev, start_samples) + self.finalize() exact_start_secs = start_samples / self.sim.sampling_rate + time_delay_secs - ofs = start_samples - snippet_start_samples - - if ofs > 0: - self.trim(ofs, length_samples) - else: - self.trim(0, length_samples) - exact_start_secs -= ofs / self.sim.sampling_rate - exact_length_secs = (length_samples - 1) / self.sim.sampling_rate 
self.time_axis = np.linspace( exact_start_secs, exact_start_secs + exact_length_secs, length_samples diff --git a/pyproject.toml b/pyproject.toml index 66f93be..99a67ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,9 +48,12 @@ dependencies = [ "python-box", "pyyaml", "requests", + "rich", "rustworkx", "scipy", + "sqlitedict", "sortedcollections", + "lagom", "zhinst-core==23.2.42414", "zhinst-toolkit~=0.5.0", "zhinst-utils~=0.3.0", @@ -116,7 +119,6 @@ fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN unfixable = [] exclude = [ - "tests/integration/dsl_v3", ".eggs", ".git", ".ruff_cache", diff --git a/requirements-dev.txt b/requirements-dev.txt index fc533a7..d644865 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -18,7 +18,7 @@ pyvisa-py==0.5.3 qcodes # Formatting (must match version in CI) -black==22.12.0 +black[jupyter]==22.10 pre-commit isort>=5.12.0 ruff==0.0.264
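The reworked `WaveScroller.calc_snippet` above walks the time-ordered event list once, keeping the last event of each kind that ends before the requested window (its state still applies when the window starts) and collecting the events that overlap the window itself. A self-contained sketch of that partitioning, using a simplified stand-in for the real `SeqCEvent`:

    from dataclasses import dataclass


    @dataclass
    class Ev:
        # simplified stand-in for SeqCEvent; not the real class
        start_samples: int
        length_samples: int
        operation: str


    def partition(events, start, length, target_ops):
        """Split ordered events into last-per-kind pre-window events and window events."""
        end = start + length
        pre, inside = {}, []
        for ev in events:
            if ev.start_samples > end:
                break  # events are ordered; nothing later can matter
            if ev.operation not in target_ops:
                continue
            if start <= ev.start_samples + ev.length_samples and ev.start_samples <= end:
                inside.append(ev)  # overlaps (or touches) the requested window
            else:
                pre[ev.operation] = ev  # ends before the window; keep the latest per kind
        return sorted(pre.values(), key=lambda e: e.start_samples), inside


    events = [
        Ev(0, 16, "SET_OSC_FREQ"),
        Ev(16, 32, "PLAY_WAVE"),
        Ev(64, 32, "PLAY_WAVE"),
    ]
    pre, inside = partition(events, 60, 40, {"PLAY_WAVE", "SET_OSC_FREQ"})
    assert [e.start_samples for e in pre] == [0, 16]  # state carried into the window
    assert [e.start_samples for e in inside] == [64]  # rendered into the snippet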