
Comparing changes

base repository: m-labs/artiq
base: 9e64f7dc3ae5
head repository: m-labs/artiq
compare: fbf94f9b6d1f
  • 10 commits
  • 35 files changed
  • 1 contributor

Commits on Oct 12, 2015

  1. 97accd2
  2. 3923dd8
  3. gui: dataset search (sbourdeauducq committed Oct 12, 2015) e6e93ab
  4. 22bffa9
  5. a83ffb3
  6. a754d4b
  7. b51910f
  8. fix imports (sbourdeauducq committed Oct 12, 2015) 5c4ed7a
  9. 3cec176
  10. fbf94f9
51 changes: 27 additions & 24 deletions artiq/frontend/artiq_client.py
@@ -12,6 +12,7 @@
from artiq.protocols.pc_rpc import Client
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols import pyon
from artiq.tools import short_format


def clear_screen():
@@ -63,24 +64,26 @@ def get_argparser():
parser_delete.add_argument("rid", type=int,
help="run identifier (RID)")

parser_set_parameter = subparsers.add_parser(
"set-parameter", help="add or modify a parameter")
parser_set_parameter.add_argument("name", help="name of the parameter")
parser_set_parameter.add_argument("value",
help="value in PYON format")
parser_set_dataset = subparsers.add_parser(
"set-dataset", help="add or modify a dataset")
parser_set_dataset.add_argument("name", help="name of the dataset")
parser_set_dataset.add_argument("value",
help="value in PYON format")
parser_set_dataset.add_argument("-p", "--persist", action="store_true",
help="make the dataset persistent")

parser_del_parameter = subparsers.add_parser(
"del-parameter", help="delete a parameter")
parser_del_parameter.add_argument("name", help="name of the parameter")
parser_del_dataset = subparsers.add_parser(
"del-dataset", help="delete a dataset")
parser_del_dataset.add_argument("name", help="name of the dataset")

parser_show = subparsers.add_parser(
"show", help="show schedule, log, devices or parameters")
"show", help="show schedule, log, devices or datasets")
parser_show.add_argument(
"what",
help="select object to show: schedule/log/devices/parameters")
help="select object to show: schedule/log/devices/datasets")

subparsers.add_parser(
"scan-ddb", help="trigger a device database (re)scan")
"scan-devices", help="trigger a device database (re)scan")

parser_scan_repos = subparsers.add_parser(
"scan-repository", help="trigger a repository (re)scan")
@@ -129,15 +132,15 @@ def _action_delete(remote, args):
remote.delete(args.rid)


def _action_set_parameter(remote, args):
remote.set(args.name, pyon.decode(args.value))
def _action_set_dataset(remote, args):
remote.set(args.name, pyon.decode(args.value), args.persist)


def _action_del_parameter(remote, args):
def _action_del_dataset(remote, args):
remote.delete(args.name)


def _action_scan_ddb(remote, args):
def _action_scan_devices(remote, args):
remote.scan()


@@ -186,11 +189,11 @@ def _show_devices(devices):
print(table)


def _show_parameters(parameters):
def _show_datasets(datasets):
clear_screen()
table = PrettyTable(["Parameter", "Value"])
for k, v in sorted(parameters.items(), key=itemgetter(0)):
table.add_row([k, str(v)])
table = PrettyTable(["Dataset", "Persistent", "Value"])
for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
table.add_row([k, "Y" if persist else "N", short_format(value)])
print(table)


@@ -259,8 +262,8 @@ def main():
_show_log(args)
elif args.what == "devices":
_show_dict(args, "devices", _show_devices)
elif args.what == "parameters":
_show_dict(args, "parameters", _show_parameters)
elif args.what == "datasets":
_show_dict(args, "datasets", _show_datasets)
else:
print("Unknown object to show, use -h to list valid names.")
sys.exit(1)
@@ -269,9 +272,9 @@ def main():
target_name = {
"submit": "master_schedule",
"delete": "master_schedule",
"set_parameter": "master_pdb",
"del_parameter": "master_pdb",
"scan_ddb": "master_ddb",
"set_dataset": "master_dataset_db",
"del_dataset": "master_dataset_db",
"scan_devices": "master_device_db",
"scan_repository": "master_repository"
}[action]
remote = Client(args.server, port, target_name)
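
The rewritten _show_datasets expects the subscriber to publish a dict mapping each dataset name to a (persist, value) pair. Below is a minimal standalone sketch of the same table rendering, assuming the prettytable package and a simplified stand-in for artiq.tools.short_format; the sample data is invented for illustration.

    from operator import itemgetter
    from prettytable import PrettyTable

    def short_format(value, max_len=32):
        # Simplified stand-in for artiq.tools.short_format: truncate long reprs.
        r = repr(value)
        return r if len(r) <= max_len else r[:max_len] + "..."

    def show_datasets(datasets):
        # datasets maps name -> (persist, value), as published by the master.
        table = PrettyTable(["Dataset", "Persistent", "Value"])
        for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
            table.add_row([k, "Y" if persist else "N", short_format(value)])
        print(table)

    show_datasets({"flopping_freq": (True, 1499.9),
                   "scan_points": (False, list(range(100)))})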
20 changes: 10 additions & 10 deletions artiq/frontend/artiq_compile.py
@@ -3,8 +3,8 @@
import logging
import argparse

from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.worker_db import DeviceManager, DatasetManager
from artiq.tools import *


@@ -15,10 +15,10 @@ def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ static compiler")

verbosity_args(parser)
parser.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("-p", "--pdb", default="pdb.pyon",
help="parameter database file")
parser.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
parser.add_argument("--dataset-db", default="dataset_db.pyon",
help="dataset file (default: '%(default)s')")

parser.add_argument("-e", "--experiment", default=None,
help="experiment to compile")
@@ -36,14 +36,14 @@ def main():
args = get_argparser().parse_args()
init_logger(args)

dmgr = DeviceManager(FlatFileDB(args.ddb))
pdb = FlatFileDB(args.pdb)
device_mgr = DeviceManager(DeviceDB(args.device_db))
dataset_mgr = DatasetManager(DatasetDB(args.dataset_db))

try:
module = file_import(args.file)
exp = get_experiment(module, args.experiment)
arguments = parse_arguments(args.arguments)
exp_inst = exp(dmgr, pdb, **arguments)
exp_inst = exp(device_mgr, dataset_mgr, **arguments)

if (not hasattr(exp.run, "k_function_info")
or not exp.run.k_function_info):
@@ -55,7 +55,7 @@ def main():
[exp_inst], {},
with_attr_writeback=False)
finally:
dmgr.close_devices()
device_mgr.close_devices()

if rpc_map:
raise ValueError("Experiment must not use RPC")
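
Condensed, the new compile-time setup replaces the old FlatFileDB/dmgr/pdb pair with explicit device and dataset managers. A sketch of that construction and teardown flow outside of argparse; the file names are the new defaults from the diff, and the experiment path and arguments are placeholders.

    from artiq.master.databases import DeviceDB, DatasetDB
    from artiq.master.worker_db import DeviceManager, DatasetManager
    from artiq.tools import file_import, get_experiment, parse_arguments

    device_mgr = DeviceManager(DeviceDB("device_db.pyon"))
    dataset_mgr = DatasetManager(DatasetDB("dataset_db.pyon"))
    try:
        module = file_import("my_experiment.py")       # placeholder path
        exp = get_experiment(module, None)             # None: sole experiment in the file
        arguments = parse_arguments(["count=100"])     # "key=value" strings, as on the CLI
        exp_inst = exp(device_mgr, dataset_mgr, **arguments)
    finally:
        device_mgr.close_devices()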
12 changes: 6 additions & 6 deletions artiq/frontend/artiq_coretool.py
@@ -2,8 +2,8 @@

import argparse

from artiq.master.databases import DeviceDB
from artiq.master.worker_db import DeviceManager
from artiq.protocols.file_db import FlatFileDB


def to_bytes(string):
@@ -13,8 +13,8 @@ def to_bytes(string):
def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ core device "
"remote access tool")
parser.add_argument("--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")

subparsers = parser.add_subparsers(dest="action")
subparsers.required = True
@@ -58,9 +58,9 @@ def get_argparser():

def main():
args = get_argparser().parse_args()
dmgr = DeviceManager(FlatFileDB(args.ddb))
device_mgr = DeviceManager(DeviceDB(args.device_db))
try:
comm = dmgr.get("comm")
comm = device_mgr.get("comm")

if args.action == "log":
print(comm.get_log())
@@ -82,7 +82,7 @@ def main():
elif args.action == "cfg-erase":
comm.flash_storage_erase()
finally:
dmgr.close_devices()
device_mgr.close_devices()

if __name__ == "__main__":
main()
43 changes: 19 additions & 24 deletions artiq/frontend/artiq_gui.py
@@ -15,8 +15,7 @@
from artiq.gui.state import StateManager
from artiq.gui.explorer import ExplorerDock
from artiq.gui.moninj import MonInj
from artiq.gui.results import ResultsDock
from artiq.gui.parameters import ParametersDock
from artiq.gui.datasets import DatasetsDock
from artiq.gui.schedule import ScheduleDock
from artiq.gui.log import LogDock
from artiq.gui.console import ConsoleDock
@@ -92,30 +91,24 @@ def main():
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))

d_results = ResultsDock(win, area)
smgr.register(d_results)
loop.run_until_complete(d_results.sub_connect(
d_datasets = DatasetsDock(win, area)
smgr.register(d_datasets)
loop.run_until_complete(d_datasets.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_results.sub_close()))
atexit.register(lambda: loop.run_until_complete(d_datasets.sub_close()))

if os.name != "nt":
d_ttl_dds = MonInj()
loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop()))

d_params = ParametersDock()
loop.run_until_complete(d_params.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_params.sub_close()))

if os.name != "nt":
area.addDock(d_ttl_dds.dds_dock, "top")
area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock)
area.addDock(d_results, "above", d_ttl_dds.ttl_dock)
area.addDock(d_datasets, "above", d_ttl_dds.ttl_dock)
else:
area.addDock(d_results, "top")
area.addDock(d_params, "above", d_results)
area.addDock(d_explorer, "above", d_params)
area.addDock(d_datasets, "top")
area.addDock(d_explorer, "above", d_datasets)

d_schedule = ScheduleDock(status_bar, schedule_ctl)
loop.run_until_complete(d_schedule.sub_connect(
@@ -127,16 +120,18 @@ def main():
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))

pdb = AsyncioClient()
loop.run_until_complete(pdb.connect_rpc(
args.server, args.port_control, "master_pdb"))
atexit.register(lambda: pdb.close_rpc())
def _get_parameter(k, v):
asyncio.ensure_future(pdb.set(k, v))
dataset_db = AsyncioClient()
loop.run_until_complete(dataset_db.connect_rpc(
args.server, args.port_control, "master_dataset_db"))
atexit.register(lambda: dataset_db.close_rpc())
def _set_dataset(k, v):
asyncio.ensure_future(dataset_db.set(k, v))
def _del_dataset(k):
asyncio.ensure_future(dataset_db.delete(k))
d_console = ConsoleDock(
d_params.get_parameter,
_get_parameter,
d_results.get_result)
d_datasets.get_dataset,
_set_dataset,
_del_dataset)

area.addDock(d_console, "bottom")
area.addDock(d_log, "above", d_console)
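
The console dock now talks to the master's dataset database over RPC instead of the old parameter database. A minimal sketch of the same client-side calls, assuming the default control port 3251 and a throwaway dataset name used only for illustration:

    import asyncio
    from artiq.protocols.pc_rpc import AsyncioClient

    async def demo(server="::1", port_control=3251):
        dataset_db = AsyncioClient()
        await dataset_db.connect_rpc(server, port_control, "master_dataset_db")
        try:
            await dataset_db.set("gui_demo", 42)   # same call _set_dataset schedules
            await dataset_db.delete("gui_demo")    # same call _del_dataset schedules
        finally:
            dataset_db.close_rpc()

    asyncio.get_event_loop().run_until_complete(demo())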
28 changes: 22 additions & 6 deletions artiq/frontend/artiq_influxdb.py
@@ -93,7 +93,7 @@ def update(self, k, v):
try:
self._queue.put_nowait((k, v))
except asyncio.QueueFull:
logger.warning("failed to update parameter '%s': "
logger.warning("failed to update dataset '%s': "
"too many pending updates", k)

async def _do(self):
@@ -103,7 +103,7 @@ async def _do(self):
params = {"u": self.user, "p": self.password, "db": self.database,
"consistency": "any", "precision": "n"}
fmt_ty, fmt_v = format_influxdb(v)
data = "{},parameter={} {}={}".format(self.table, k, fmt_ty, fmt_v)
data = "{},dataset={} {}={}".format(self.table, k, fmt_ty, fmt_v)
try:
response = await aiohttp.request(
"POST", url, params=params, data=data)
@@ -121,15 +121,31 @@ async def _do(self):
response.close()


class Parameters:
class _Mock:
def __setitem__(self, k, v):
pass

def __getitem__(self, k):
return self

def __delitem__(self, k):
pass


class Datasets:
def __init__(self, filter_function, writer, init):
self.filter_function = filter_function
self.writer = writer

def __setitem__(self, k, v):
if self.filter_function(k):
self.writer.update(k, v)
self.writer.update(k, v[1])

# ignore mutations
def __getitem__(self, k):
return _Mock()

# ignore deletions
def __delitem__(self, k):
pass

@@ -145,8 +161,8 @@ def __init__(self, server, port, retry, filter_function, writer):

async def _do(self):
subscriber = Subscriber(
"parameters",
partial(Parameters, self.filter_function, self.writer))
"datasets",
partial(Datasets, self.filter_function, self.writer))
while True:
try:
await subscriber.connect(self.server, self.port)
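
The _Mock/Datasets pair turns the subscribed structure into a write-only sink: only top-level assignments of keys accepted by the filter reach the InfluxDB writer, while nested mutations and deletions are silently dropped. A quick self-contained illustration, with a toy writer standing in for the HTTP one:

    class _Mock:
        def __setitem__(self, k, v):
            pass
        def __getitem__(self, k):
            return self
        def __delitem__(self, k):
            pass

    class Datasets:
        def __init__(self, filter_function, writer, init):
            self.filter_function = filter_function
            self.writer = writer
        def __setitem__(self, k, v):
            if self.filter_function(k):
                self.writer.update(k, v[1])   # v is a (persist, value) pair
        def __getitem__(self, k):
            return _Mock()
        def __delitem__(self, k):
            pass

    class PrintWriter:
        def update(self, k, v):
            print("forwarded:", k, v)

    d = Datasets(lambda k: not k.startswith("_"), PrintWriter(), init=None)
    d["flopping_freq"] = (True, 1499.9)   # forwarded: flopping_freq 1499.9
    d["_scratch"] = (False, 0)            # rejected by the filter
    d["flopping_freq"][0] = 123           # nested mutation, swallowed by _Mock
    del d["flopping_freq"]                # deletion, ignored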