Comparing changes

base repository: m-labs/artiq
base: ae99af27ee10
head repository: m-labs/artiq
compare: 4166f4e92891
  • 4 commits
  • 12 files changed
  • 1 contributor

Commits on Nov 11, 2015

  1. c3f99ed
  2. 77330c7
  3. 69f5e37
  4. 4166f4e
10 changes: 5 additions & 5 deletions artiq/frontend/artiq_ctlmgr.py
@@ -15,7 +15,7 @@
from artiq.protocols.logging import (LogForwarder,
parse_log_message, log_with_name,
SourceFilter)
from artiq.tools import TaskObject, Condition
from artiq.tools import TaskObject, Condition, atexit_register_coroutine


logger = logging.getLogger(__name__)
@@ -311,27 +311,27 @@ def main():
asyncio.set_event_loop(loop)
else:
loop = asyncio.get_event_loop()
atexit.register(lambda: loop.close())
atexit.register(loop.close)

logfwd = LogForwarder(args.server, args.port_logging,
args.retry_master)
logfwd.addFilter(source_adder)
root_logger.addHandler(logfwd)
logfwd.start()
atexit.register(lambda: loop.run_until_complete(logfwd.stop()))
atexit_register_coroutine(logfwd.stop)

ctlmgr = ControllerManager(args.server, args.port_notify,
args.retry_master)
ctlmgr.start()
atexit.register(lambda: loop.run_until_complete(ctlmgr.stop()))
atexit_register_coroutine(ctlmgr.stop)

class CtlMgrRPC:
retry_now = ctlmgr.retry_now

rpc_target = CtlMgrRPC()
rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True)
loop.run_until_complete(rpc_server.start(args.bind, args.bind_port))
atexit.register(lambda: loop.run_until_complete(rpc_server.stop()))
atexit_register_coroutine(rpc_server.stop)

loop.run_until_complete(rpc_server.wait_terminate())

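The body of atexit_register_coroutine is not part of this comparison; a minimal sketch consistent with the lambda-wrapped calls it replaces, assuming it lives in artiq/tools.py, could look like:

```python
# Hypothetical sketch, not shown in this diff: a helper in artiq.tools that
# replaces atexit.register(lambda: loop.run_until_complete(coro())).
import asyncio
import atexit


def atexit_register_coroutine(coroutine, *, loop=None):
    # Run the coroutine function to completion when the interpreter exits.
    if loop is None:
        loop = asyncio.get_event_loop()
    atexit.register(lambda: loop.run_until_complete(coroutine()))
```

Call sites then pass the coroutine function itself, e.g. atexit_register_coroutine(logfwd.stop), instead of wrapping it in a lambda at every registration.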
60 changes: 28 additions & 32 deletions artiq/frontend/artiq_gui.py
@@ -10,15 +10,10 @@
from quamash import QEventLoop, QtGui, QtCore
from pyqtgraph import dockarea

from artiq.tools import verbosity_args, init_logger, artiq_dir
from artiq.tools import *
from artiq.protocols.pc_rpc import AsyncioClient
from artiq.gui.state import StateManager
from artiq.gui.explorer import ExplorerDock
from artiq.gui.moninj import MonInj
from artiq.gui.datasets import DatasetsDock
from artiq.gui.schedule import ScheduleDock
from artiq.gui.log import LogDock
from artiq.gui.console import ConsoleDock
from artiq.gui.models import ModelSubscriber
from artiq.gui import state, explorer, moninj, datasets, schedule, log, console


def get_argparser():
@@ -74,7 +69,18 @@ def main():
atexit.register(client.close_rpc)
rpc_clients[target] = client

smgr = StateManager(args.db_file)
sub_clients = dict()
for notifier_name, module in (("explist", explorer),
("datasets", datasets),
("schedule", schedule),
("log", log)):
subscriber = ModelSubscriber(notifier_name, module.Model)
loop.run_until_complete(subscriber.connect(
args.server, args.port_notify))
atexit_register_coroutine(subscriber.close)
sub_clients[notifier_name] = subscriber

smgr = state.StateManager(args.db_file)

win = MainWindow(app, args.server)
area = dockarea.DockArea()
@@ -85,24 +91,20 @@ def main():
status_bar.showMessage("Connected to {}".format(args.server))
win.setStatusBar(status_bar)

d_explorer = ExplorerDock(win, status_bar,
rpc_clients["schedule"],
rpc_clients["repository"])
d_explorer = explorer.ExplorerDock(win, status_bar,
sub_clients["explist"],
sub_clients["schedule"],
rpc_clients["schedule"],
rpc_clients["repository"])
smgr.register(d_explorer)
loop.run_until_complete(d_explorer.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))

d_datasets = DatasetsDock(win, area)
d_datasets = datasets.DatasetsDock(win, area, sub_clients["datasets"])
smgr.register(d_datasets)
loop.run_until_complete(d_datasets.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_datasets.sub_close()))

if os.name != "nt":
d_ttl_dds = MonInj()
d_ttl_dds = moninj.MonInj()
loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop()))
atexit_register_coroutine(d_ttl_dds.stop)

if os.name != "nt":
area.addDock(d_ttl_dds.dds_dock, "top")
@@ -112,23 +114,17 @@ def main():
area.addDock(d_datasets, "top")
area.addDock(d_explorer, "above", d_datasets)

d_schedule = ScheduleDock(status_bar, rpc_clients["schedule"])
loop.run_until_complete(d_schedule.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_schedule.sub_close()))
d_explorer.get_current_schedule = d_schedule.get_current_schedule
d_schedule = schedule.ScheduleDock(
status_bar, rpc_clients["schedule"], sub_clients["schedule"])

d_log = LogDock()
d_log = log.LogDock(sub_clients["log"])
smgr.register(d_log)
loop.run_until_complete(d_log.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))

def _set_dataset(k, v):
asyncio.ensure_future(rpc_clients["dataset_db"].set(k, v))
def _del_dataset(k):
asyncio.ensure_future(rpc_clients["dataset_db"].delete(k))
d_console = ConsoleDock(
d_console = console.ConsoleDock(
d_datasets.get_dataset,
_set_dataset,
_del_dataset)
@@ -139,7 +135,7 @@ def _del_dataset(k):

smgr.load()
smgr.start()
atexit.register(lambda: loop.run_until_complete(smgr.stop()))
atexit_register_coroutine(smgr.stop)
win.show()
loop.run_until_complete(win.exit_request.wait())

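The new artiq.gui.models.ModelSubscriber class is also outside this comparison; a rough sketch, assuming it wraps artiq.protocols.sync_struct.Subscriber and hands each freshly built Model to registered docks via add_setmodel_callback, might be:

```python
# Sketch only: the real artiq.gui.models module is not shown in this diff.
from artiq.protocols.sync_struct import Subscriber


class ModelSubscriber(Subscriber):
    def __init__(self, notifier_name, model_factory):
        Subscriber.__init__(self, notifier_name, self._create_model)
        self.model = None
        self.model_factory = model_factory
        self._setmodel_callbacks = []

    def _create_model(self, init):
        # Build a fresh model from the initial notifier state and push it to
        # every dock that registered interest (e.g. DatasetsDock.set_model).
        self.model = self.model_factory(init)
        for callback in self._setmodel_callbacks:
            callback(self.model)
        return self.model

    def add_setmodel_callback(self, callback):
        self._setmodel_callbacks.append(callback)
        if self.model is not None:
            callback(self.model)
```

This matches the wiring in main(): one ModelSubscriber per notifier is created with the module's Model class and connected once, and docks such as DatasetsDock only register a set_model callback instead of managing their own Subscriber and sub_connect/sub_close pair.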
11 changes: 5 additions & 6 deletions artiq/frontend/artiq_influxdb.py
@@ -10,8 +10,7 @@
import numpy as np
import aiohttp

from artiq.tools import verbosity_args, init_logger
from artiq.tools import TaskObject
from artiq.tools import *
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols.pc_rpc import Server
from artiq.protocols import pyon
@@ -239,23 +238,23 @@ def main():
init_logger(args)

loop = asyncio.get_event_loop()
atexit.register(lambda: loop.close())
atexit.register(loop.close)

writer = DBWriter(args.baseurl_db,
args.user_db, args.password_db,
args.database, args.table)
writer.start()
atexit.register(lambda: loop.run_until_complete(writer.stop()))
atexit_register_coroutine(writer.stop)

filter = Filter(args.pattern_file)
rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True)
loop.run_until_complete(rpc_server.start(args.bind, args.bind_port))
atexit.register(lambda: loop.run_until_complete(rpc_server.stop()))
atexit_register_coroutine(rpc_server.stop)

reader = MasterReader(args.server_master, args.port_master,
args.retry_master, filter._filter, writer)
reader.start()
atexit.register(lambda: loop.run_until_complete(reader.stop()))
atexit_register_coroutine(reader.stop)

loop.run_until_complete(rpc_server.wait_terminate())

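DBWriter, MasterReader and the other objects stopped above follow the start()/stop() pattern of TaskObject from artiq.tools, whose definition is also outside this diff; a simplified sketch of that pattern, for orientation only:

```python
# Simplified sketch of the TaskObject pattern; the actual artiq.tools
# implementation is not part of this comparison.
import asyncio


class TaskObject:
    def start(self):
        # Schedule the long-running coroutine on the current event loop.
        self.task = asyncio.ensure_future(self._do())

    async def stop(self):
        # Cancel the task and wait for it to unwind; this coroutine is what
        # atexit_register_coroutine runs at interpreter exit.
        self.task.cancel()
        try:
            await asyncio.wait_for(self.task, None)
        except asyncio.CancelledError:
            pass

    async def _do(self):
        raise NotImplementedError
```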
13 changes: 7 additions & 6 deletions artiq/frontend/artiq_master.py
@@ -5,6 +5,7 @@
import atexit
import os

from artiq.tools import atexit_register_coroutine
from artiq.protocols.pc_rpc import Server as RPCServer
from artiq.protocols.sync_struct import Publisher
from artiq.protocols.logging import Server as LoggingServer
@@ -59,12 +60,12 @@ def main():
asyncio.set_event_loop(loop)
else:
loop = asyncio.get_event_loop()
atexit.register(lambda: loop.close())
atexit.register(loop.close)

device_db = DeviceDB(args.device_db)
dataset_db = DatasetDB(args.dataset_db)
dataset_db.start()
atexit.register(lambda: loop.run_until_complete(dataset_db.stop()))
atexit_register_coroutine(dataset_db.stop)

if args.git:
repo_backend = GitBackend(args.repository)
@@ -90,7 +91,7 @@ def main():
"scheduler_get_status": scheduler.get_status
})
scheduler.start()
atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
atexit_register_coroutine(scheduler.stop)

server_control = RPCServer({
"master_device_db": device_db,
@@ -100,7 +101,7 @@ def main():
})
loop.run_until_complete(server_control.start(
args.bind, args.port_control))
atexit.register(lambda: loop.run_until_complete(server_control.stop()))
atexit_register_coroutine(server_control.stop)

server_notify = Publisher({
"schedule": scheduler.notifier,
@@ -111,12 +112,12 @@ def main():
})
loop.run_until_complete(server_notify.start(
args.bind, args.port_notify))
atexit.register(lambda: loop.run_until_complete(server_notify.stop()))
atexit_register_coroutine(server_notify.stop)

server_logging = LoggingServer()
loop.run_until_complete(server_logging.start(
args.bind, args.port_logging))
atexit.register(lambda: loop.run_until_complete(server_logging.stop()))
atexit_register_coroutine(server_logging.stop)

loop.run_forever()

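All of these registrations rely on atexit running callbacks in last-in, first-out order: loop.close is registered first and therefore runs last, after every stop coroutine has been awaited. A standalone toy illustrating the ordering (the code below is not from ARTIQ):

```python
# Illustration of atexit LIFO ordering with an asyncio loop.
import asyncio
import atexit

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
atexit.register(loop.close)  # registered first -> runs last


async def stop(name):
    print("stopped", name)

# Registered after loop.close, so these run before it, newest first:
atexit.register(lambda: loop.run_until_complete(stop("dataset_db")))
atexit.register(lambda: loop.run_until_complete(stop("scheduler")))
# At exit: "stopped scheduler", "stopped dataset_db", then loop.close().
```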
32 changes: 13 additions & 19 deletions artiq/gui/datasets.py
@@ -7,9 +7,8 @@
from pyqtgraph import dockarea
from pyqtgraph import LayoutWidget

from artiq.protocols.sync_struct import Subscriber
from artiq.tools import short_format
from artiq.gui.tools import DictSyncModel
from artiq.gui.models import DictSyncModel
from artiq.gui.displays import *

try:
@@ -21,10 +20,9 @@
logger = logging.getLogger(__name__)


class DatasetsModel(DictSyncModel):
def __init__(self, parent, init):
DictSyncModel.__init__(self, ["Dataset", "Persistent", "Value"],
parent, init)
class Model(DictSyncModel):
def __init__(self, init):
DictSyncModel.__init__(self, ["Dataset", "Persistent", "Value"], init)

def sort_key(self, k, v):
return k
@@ -47,7 +45,7 @@ def _get_display_type_name(display_cls):


class DatasetsDock(dockarea.Dock):
def __init__(self, dialog_parent, dock_area):
def __init__(self, dialog_parent, dock_area, datasets_sub):
dockarea.Dock.__init__(self, "Datasets", size=(1500, 500))
self.dialog_parent = dialog_parent
self.dock_area = dock_area
@@ -66,6 +64,9 @@ def __init__(self, dialog_parent, dock_area):
QtGui.QHeaderView.ResizeToContents)
grid.addWidget(self.table, 1, 0)

self.table_model = Model(dict())
datasets_sub.add_setmodel_callback(self.set_model)

add_display_box = QtGui.QGroupBox("Add display")
grid.addWidget(add_display_box, 1, 1)
display_grid = QtGui.QGridLayout()
@@ -79,25 +80,18 @@ def __init__(self, dialog_parent, dock_area):
self.displays = dict()

def _search_datasets(self):
self.table_model_filter.setFilterFixedString(self.search.displayText())
if hasattr(self, "table_model_filter"):
self.table_model_filter.setFilterFixedString(
self.search.displayText())

def get_dataset(self, key):
return self.table_model.backing_store[key][1]

async def sub_connect(self, host, port):
self.subscriber = Subscriber("datasets", self.init_datasets_model,
self.on_mod)
await self.subscriber.connect(host, port)

async def sub_close(self):
await self.subscriber.close()

def init_datasets_model(self, init):
self.table_model = DatasetsModel(self.table, init)
def set_model(self, model):
self.table_model = model
self.table_model_filter = QSortFilterProxyModel()
self.table_model_filter.setSourceModel(self.table_model)
self.table.setModel(self.table_model_filter)
return self.table_model

def update_display_data(self, dsp):
filtered_data = {k: self.table_model.backing_store[k][1]