
Comparing changes

base repository: m-labs/artiq
base: 1a513634ff71
head repository: m-labs/artiq
compare: 84d50c4caf48
  • 2 commits
  • 6 files changed
  • 1 contributor

Commits on Mar 29, 2016

  1. style

    sbourdeauducq committed Mar 29, 2016
    a545598
  2. environment,worker_db: mutate datasets from experiments via dedicated method instead of Notifier. Closes #345

    sbourdeauducq committed Mar 29, 2016
    84d50c4
Showing with 45 additions and 27 deletions.
  1. +2 −0 RELEASE_NOTES.rst
  2. +2 −2 artiq/coredevice/core.py
  3. +3 −3 artiq/coredevice/dds.py
  4. +16 −6 artiq/language/environment.py
  5. +14 −8 artiq/master/worker_db.py
  6. +8 −8 examples/master/repository/flopping_f_simulation.py
2 changes: 2 additions & 0 deletions RELEASE_NOTES.rst
@@ -9,6 +9,8 @@ unreleased
 * The CPU speed in the pipistrello gateware has been reduced from 83 1/3 MHz to
   75 MHz. This will reduce the achievable sustained pulse rate and latency
   accordingly. ISE was intermittently failing to meet timing (#341).
+* set_dataset in broadcast mode no longer returns a Notifier. Mutating datasets
+  should be done with mutate_dataset instead (#345).


 1.0rc1
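
A quick illustration of the migration this release note asks for, as it would appear inside an experiment's run() method (the dataset name, array length and measure() helper are hypothetical, and numpy is assumed to be imported as np):

    # Before #345: set_dataset in broadcast mode returned a Notifier that
    # was mutated directly.
    #     spectrum = self.set_dataset("spectrum", np.full(100, np.nan),
    #                                 broadcast=True)
    #     spectrum[i] = measure(i)

    # After #345: set_dataset returns nothing; point updates go through the
    # dedicated mutate_dataset method.
    self.set_dataset("spectrum", np.full(100, np.nan), broadcast=True)
    for i in range(100):
        self.mutate_dataset("spectrum", i, measure(i))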
4 changes: 2 additions & 2 deletions artiq/coredevice/core.py
@@ -60,8 +60,8 @@ class Core:
"""

kernel_constant_attributes = {
'core', 'ref_period', 'coarse_ref_period', 'ref_multiplier',
'external_clock',
"core", "ref_period", "coarse_ref_period", "ref_multiplier",
"external_clock",
}

def __init__(self, dmgr, ref_period, external_clock=False,
6 changes: 3 additions & 3 deletions artiq/coredevice/dds.py
@@ -29,7 +29,7 @@ def dds_batch_exit() -> TNone:


 class _BatchContextManager:
-    kernel_constant_attributes = {'core', 'core_dds'}
+    kernel_constant_attributes = {"core", "core_dds"}

     def __init__(self, core_dds):
         self.core_dds = core_dds
@@ -53,7 +53,7 @@ class CoreDDS:
     phase-locked multiple of the RTIO clock.
     """

-    kernel_constant_attributes = {'core', 'sysclk', 'batch'}
+    kernel_constant_attributes = {"core", "sysclk", "batch"}

     def __init__(self, dmgr, sysclk, core_device="core"):
         self.core = dmgr.get(core_device)
@@ -89,7 +89,7 @@ class _DDSGeneric:
"""

kernel_constant_attributes = {
'core', 'core_dds', 'bus_channel', 'channel', 'pow_width'
"core", "core_dds", "bus_channel", "channel", "pow_width"
}

def __init__(self, dmgr, bus_channel, channel, core_dds_device="core_dds"):
22 changes: 16 additions & 6 deletions artiq/language/environment.py
@@ -221,13 +221,11 @@ def set_dataset(self, key, value,
                     broadcast=False, persist=False, save=True):
         """Sets the contents and handling modes of a dataset.
         If the dataset is broadcasted, it must be PYON-serializable.
-        If the dataset is saved, it must be a scalar (``bool``, ``int``,
-        ``float`` or NumPy scalar) or a NumPy array.
+        Datasets must be scalars (``bool``, ``int``, ``float`` or NumPy scalar)
+        or NumPy arrays.
         :param broadcast: the data is sent in real-time to the master, which
-            dispatches it. Returns a Notifier that can be used to mutate the
-            dataset.
+            dispatches it.
         :param persist: the master should store the data on-disk. Implies
             broadcast.
         :param save: the data is saved into the local storage of the current
@@ -238,7 +236,19 @@ def set_dataset(self, key, value,
             return
         if self.__dataset_mgr is None:
             raise ValueError("Dataset manager not present")
-        return self.__dataset_mgr.set(key, value, broadcast, persist, save)
+        self.__dataset_mgr.set(key, value, broadcast, persist, save)

+    def mutate_dataset(self, key, index, value):
+        """Mutate an existing dataset at the given index (e.g. set a value at
+        a given position in a NumPy array)
+        If the dataset was created in broadcast mode, the modification is
+        immediately transmitted."""
+        if self.__parent is not None:
+            self.__parent.mutate_dataset(key, index, value)
+        if self.__dataset_mgr is None:
+            raise ValueError("Dataset manager not present")
+        self.__dataset_mgr.mutate(key, index, value)
+
     def get_dataset(self, key, default=NoDefault):
         """Returns the contents of a dataset.
22 changes: 14 additions & 8 deletions artiq/master/worker_db.py
@@ -228,20 +228,26 @@ def __init__(self, ddb):
     def set(self, key, value, broadcast=False, persist=False, save=True):
         if persist:
             broadcast = True
-        r = None
         if broadcast:
-            self.broadcast[key] = (persist, value)
-            r = self.broadcast[key][1]
+            self.broadcast[key] = persist, value
         if save:
             self.local[key] = value
-        return r

+    def mutate(self, key, index, value):
+        target = None
+        if key in self.local:
+            target = self.local[key]
+        if key in self.broadcast.read:
+            target = self.broadcast[key][1]
+        if target is None:
+            raise KeyError("Cannot mutate non-existing dataset")
+        target[index] = value
+
     def get(self, key):
-        try:
+        if key in self.local:
             return self.local[key]
-        except KeyError:
-            pass
-        return self.ddb.get(key)
+        else:
+            return self.ddb.get(key)

     def write_hdf5(self, f):
         result_dict_to_hdf5(f, self.local)
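
The lookup order in the new DatasetManager.mutate is worth spelling out: the local copy is found first, but a broadcast copy, when present, wins, and a key present in neither raises KeyError. A toy stand-in (plain dicts instead of the Notifier-backed self.broadcast, dataset name hypothetical) behaves like this; in the real class, writing through the broadcast entry is presumably what pushes the change to the master in real time:

    import numpy as np

    local = {"spectrum": np.zeros(4)}
    broadcast = {"spectrum": (False, np.zeros(4))}  # (persist, value), mirroring set()

    def mutate(key, index, value):
        target = None
        if key in local:
            target = local[key]
        if key in broadcast:            # stands in for "key in self.broadcast.read"
            target = broadcast[key][1]
        if target is None:
            raise KeyError("Cannot mutate non-existing dataset")
        target[index] = value

    mutate("spectrum", 2, 1.0)
    print(broadcast["spectrum"][1])     # [0. 0. 1. 0.] -- the broadcast copy was mutated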
16 changes: 8 additions & 8 deletions examples/master/repository/flopping_f_simulation.py
@@ -38,19 +38,19 @@ def build(self):

     def run(self):
         l = len(self.frequency_scan)
-        frequency = self.set_dataset("flopping_f_frequency",
-                                     np.full(l, np.nan),
-                                     broadcast=True, save=False)
-        brightness = self.set_dataset("flopping_f_brightness",
-                                      np.full(l, np.nan),
-                                      broadcast=True)
+        self.set_dataset("flopping_f_frequency",
+                         np.full(l, np.nan),
+                         broadcast=True, save=False)
+        self.set_dataset("flopping_f_brightness",
+                         np.full(l, np.nan),
+                         broadcast=True)
         self.set_dataset("flopping_f_fit", np.full(l, np.nan),
                          broadcast=True, save=False)

         for i, f in enumerate(self.frequency_scan):
             m_brightness = model(f, self.F0) + self.noise_amplitude*random.random()
-            frequency[i] = f
-            brightness[i] = m_brightness
+            self.mutate_dataset("flopping_f_frequency", i, f)
+            self.mutate_dataset("flopping_f_brightness", i, m_brightness)
             time.sleep(0.1)
         self.scheduler.submit(self.scheduler.pipeline_name, self.scheduler.expid,
                               self.scheduler.priority, time.time() + 20, False)