diff --git a/.flake8 b/.flake8 new file mode 100644 index 000000000..7acc4945b --- /dev/null +++ b/.flake8 @@ -0,0 +1,10 @@ +[flake8] +max_line_length = 80 +ignore = E501 +in-place = true +recursive = true +aggressive = 2 +exclude = + agents/meinberg_m1000/mibs/MBG-SNMP-LTNG-MIB.py + agents/meinberg_m1000/mibs/SNMPv2-MIB.py + agents/meinberg_m1000/mibs/MBG-SNMP-ROOT-MIB.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..a975232ae --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,9 @@ +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 320a51357..930af92cc 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -6,67 +6,7 @@ on: jobs: test: - name: pytest with coverage - runs-on: ubuntu-latest - - steps: - - name: Install system packages - run: | - sudo apt-get install -y socat - - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - # (start steps from pytest workflow) - # Install - - name: Install so3g - run: | - pip3 install so3g - - # Fetch all history for all tags and branches - - name: clone socs - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - - name: Install socs - run: | - pip3 install -r requirements.txt - pip3 install -e . 
- - # Unit Tests - - name: Run unit tests - working-directory: ./tests - run: | - COVERAGE_FILE=.coverage.unit python3 -m pytest --cov -m 'not integtest' - - - name: Build docker test images - run: | - docker-compose build socs - - # Integration Tests - - name: Build images for integration tests - run: | - docker-compose build ocs-lakeshore372-simulator - - - name: Run integration tests - working-directory: ./tests - run: | - COVERAGE_FILE=.coverage.int python3 -m pytest --cov -m 'integtest' - - # Coverage - - name: Report test coverage - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - mv ./tests/.coverage.* ./ - pip install coveralls - coverage combine - coverage report - coveralls --service=github - # (end steps from pytest workflow) + uses: ./.github/workflows/pytest.yml docker: name: build and deploy docker images @@ -76,7 +16,7 @@ jobs: steps: # Fetch all history for all tags and branches - name: clone socs - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -85,14 +25,17 @@ jobs: run: | docker-compose build + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.REGISTRY_USER }} + password: ${{ secrets.REGISTRY_PASSWORD }} + - name: Build and push official docker image env: - REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} - REGISTRY_USER: ${{ secrets.REGISTRY_USER }} DOCKERHUB_ORG: "simonsobs" run: | export DOCKER_TAG=`git describe --tags --always` - echo "${REGISTRY_PASSWORD}" | docker login -u "${REGISTRY_USER}" --password-stdin; # Tag all images for upload to the registry docker-compose config | grep 'image: ' | awk -F ': ' '{ print $2 }' | xargs -I {} docker tag {}:latest ${DOCKERHUB_ORG}/{}:latest @@ -113,12 +56,12 @@ jobs: steps: - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.8 - name: clone socs - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: install build dependencies run: | diff --git 
a/.github/workflows/develop.yml b/.github/workflows/develop.yml index 9da0703d7..12797a1d2 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -3,75 +3,16 @@ name: Build Develop Images on: push: branches: [ develop ] + paths-ignore: + - 'docs/**' + - '*.rst' + - '*.md' + - '.flake8' + - '.pre-commit-config.yaml' jobs: test: - name: pytest with coverage - runs-on: ubuntu-latest - - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.9.1 - with: - access_token: ${{ github.token }} - - - name: Install system packages - run: | - sudo apt-get install -y socat - - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - # (start steps from pytest workflow) - # Install - - name: Install so3g - run: | - pip3 install so3g - - # Fetch all history for all tags and branches - - name: clone socs - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - - name: Install socs - run: | - pip3 install -r requirements.txt - pip3 install -e . 
- - # Unit Tests - - name: Run unit tests - working-directory: ./tests - run: | - COVERAGE_FILE=.coverage.unit python3 -m pytest --cov -m 'not integtest' - - - name: Build docker test images - run: | - docker-compose build socs - - # Integration Tests - - name: Build images for integration tests - run: | - docker-compose build ocs-lakeshore372-simulator - - - name: Run integration tests - working-directory: ./tests - run: | - COVERAGE_FILE=.coverage.int python3 -m pytest --cov -m 'integtest' - - # Coverage - - name: Report test coverage - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - mv ./tests/.coverage.* ./ - pip install coveralls - coverage combine - coverage report - coveralls --service=github - # (end steps from pytest workflow) + uses: ./.github/workflows/pytest.yml build: name: build and deploy dev images @@ -81,7 +22,7 @@ jobs: steps: # Fetch all history for all tags and branches - name: clone socs - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -90,15 +31,18 @@ jobs: run: | docker-compose build + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.REGISTRY_USER }} + password: ${{ secrets.REGISTRY_PASSWORD }} + - name: Build and push development docker image env: - REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} - REGISTRY_USER: ${{ secrets.REGISTRY_USER }} DOCKERHUB_ORG: "simonsobs" run: | export DOCKER_TAG=`git describe --tags --always`-dev echo "${DOCKER_TAG}" - echo "${REGISTRY_PASSWORD}" | docker login -u "${REGISTRY_USER}" --password-stdin; # Tag all images for upload to the registry docker-compose config | grep 'image: ' | awk -F ': ' '{ print $2 }' | xargs -I {} docker tag {}:latest ${DOCKERHUB_ORG}/{}:${DOCKER_TAG} @@ -106,3 +50,40 @@ jobs: # Upload to docker registry docker-compose config | grep 'image: ' | awk -F ': ' '{ print $2 }' | xargs -I {} docker push ${DOCKERHUB_ORG}/{}:${DOCKER_TAG} docker-compose config | grep 'image: ' | awk -F ': ' '{ print $2 }' 
| xargs -I {} echo ${DOCKERHUB_ORG}/{}:${DOCKER_TAG} pushed + + # testing so we can catch any issues before release + # if issues are found, test locally, or copy to pytest.yml for test on push + wheel: + name: build and test wheel + needs: test + runs-on: ubuntu-latest + + steps: + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: 3.8 + + - name: clone socs + uses: actions/checkout@v3 + + - name: install build dependencies + run: | + python3 -m pip install --upgrade build twine + + - name: build wheel + run: | + python3 -m build + + - name: install wheel + run: | + python3 -m pip install dist/socs*.whl + + - name: install testing requirements + run: | + pip3 install -r requirements/testing.txt + + - name: Run unit tests + working-directory: ./tests + run: | + python3 -m pytest -m 'not integtest' diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 4a90f8887..c7fedb6dc 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -3,7 +3,13 @@ name: Run Tests on: push: branches-ignore: [ develop ] - pull_request: + paths-ignore: + - 'docs/**' + - '*.rst' + - '*.md' + - '.flake8' + - '.pre-commit-config.yaml' + workflow_call: jobs: test: @@ -21,7 +27,7 @@ jobs: sudo apt-get install -y socat - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.8 @@ -31,7 +37,7 @@ jobs: pip3 install so3g - name: clone socs - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install socs run: | @@ -49,10 +55,6 @@ jobs: docker-compose build socs # Integration Tests - - name: Build images for integration tests - run: | - docker-compose build ocs-lakeshore372-simulator - - name: Run integration tests working-directory: ./tests run: | diff --git a/.github/workflows/skipped-pytest.yml b/.github/workflows/skipped-pytest.yml new file mode 100644 index 000000000..f623b8795 --- /dev/null +++ b/.github/workflows/skipped-pytest.yml @@ -0,0 +1,18 @@ 
+name: Run Tests + +on: + pull_request: + paths: + - 'docs/**' + - '*.rst' + - '*.md' + - '.flake8' + - '.pre-commit-config.yaml' + +jobs: + test: + name: pytest with coverage + runs-on: ubuntu-latest + + steps: + - run: 'echo "No build required" ' diff --git a/.github/workflows/skipped-test-build.yml b/.github/workflows/skipped-test-build.yml new file mode 100644 index 000000000..448a96992 --- /dev/null +++ b/.github/workflows/skipped-test-build.yml @@ -0,0 +1,18 @@ +name: Test Docker Build + +on: + pull_request: + paths: + - 'docs/**' + - '*.rst' + - '*.md' + - '.flake8' + - '.pre-commit-config.yaml' + +jobs: + build: + name: test image build + runs-on: ubuntu-latest + + steps: + - run: 'echo "No build required" ' diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml new file mode 100644 index 000000000..033d62262 --- /dev/null +++ b/.github/workflows/test-build.yml @@ -0,0 +1,27 @@ +name: Test Docker Build + +on: + pull_request: + paths-ignore: + - 'docs/**' + - '*.rst' + - '*.md' + - '.flake8' + - '.pre-commit-config.yaml' + +jobs: + build: + name: test image build + runs-on: ubuntu-latest + + steps: + # Fetch all history for all tags and branches + - name: clone socs + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + # Dockerize + - name: Build docker images + run: | + docker-compose build diff --git a/.gitignore b/.gitignore index ec3744568..406cf7bb0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# iPython Notebooks +*.ipynb + # Crossbar stuff */dot_crossbar/key.priv */dot_crossbar/key.pub @@ -128,3 +131,5 @@ agents/aggregator/data/* agents/aggregator/data2/* key.* node.pid + +.DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..2360ef6d3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,16 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.3.0 + hooks: + - id: check-ast + - id: check-yaml + - id: end-of-file-fixer + - id: 
trailing-whitespace +- repo: https://github.com/pre-commit/mirrors-autopep8 + rev: v1.6.0 + hooks: + - id: autopep8 +- repo: https://github.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 43c2973b1..267726325 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -52,4 +52,70 @@ Development Guide Contributors should follow the recommendations made in the `SO Developer Guide`_. -.. _SO Developer Guide: https://simons1.princeton.edu/docs/so_dev_guide/html/ +.. _SO Developer Guide: https://simons1.princeton.edu/docs/so_dev_guide/ + +pre-commit +`````````` +As a way to enforce development guide recommendations we have configured +`pre-commit`_. While not required (yet), it is highly recommended you use this +tool when contributing to socs. It will save both you and the reviewers time +when submitting pull requests. + +You should set this up before making and committing your changes. To do so make +sure the ``pre-commit`` package is installed (it is in ``requirements.txt``):: + + $ pip install -r requirements.txt + +Then run:: + + $ pre-commit install + +This will install the configured git hooks and any dependencies. Now, whenever +you commit the hooks will run. If there are issues you will see them in the +output. This may automatically make changes to your staged files. These +changes will be unstaged and need to be reviewed (typically with a ``git +diff``), restaged, and recommitted. For example, if you have trailing +whitespace on a line, pre-commit will prevent the commit and remove the +whitespace. You will then stage the new changes with another ``git add `` +and then re-run the commit. Here is the expected git output for this example: + +.. code-block:: + + $ vim demo.py + $ git status + On branch koopman/test-pre-commit + Changes not staged for commit: + (use "git add ..." to update what will be committed) + (use "git restore ..." 
to discard changes in working directory) + modified: demo.py + + no changes added to commit (use "git add" and/or "git commit -a") + $ git add demo.py + $ git commit + Check python ast.........................................................Passed + Fix End of Files.........................................................Passed + Trim Trailing Whitespace.................................................Failed + - hook id: trailing-whitespace + - exit code: 1 + - files were modified by this hook + + Fixing demo/demo.py + + $ git status + On branch koopman/test-pre-commit + Changes to be committed: + (use "git restore --staged ..." to unstage) + modified: demo.py + + Changes not staged for commit: + (use "git add ..." to update what will be committed) + (use "git restore ..." to discard changes in working directory) + modified: demo.py + $ git add -u + $ git commit + +**Note:** This is a new tool to this repo, and the flake8 output might still be +somewhat strict. If there are warnings that you think should be ignored, please +bring this up for discussion in a new issue. + +.. _pre-commit: https://pre-commit.com/ diff --git a/README.rst b/README.rst index 4d73fa891..4dffe4b64 100644 --- a/README.rst +++ b/README.rst @@ -37,6 +37,10 @@ Install and update with pip:: $ pip3 install -U socs +If you need to install the optional so3g module you can do so via:: + + $ pip3 install -U socs[so3g] + Installing from Source `````````````````````` diff --git a/agents/hwp_rotation/Dockerfile b/agents/hwp_rotation/Dockerfile new file mode 100644 index 000000000..74f919795 --- /dev/null +++ b/agents/hwp_rotation/Dockerfile @@ -0,0 +1,17 @@ +# SOCS Agent container for controlling the HWP rotation speed and direction + +# Use socs base image +FROM socs:latest + +# Set the working directory to registry directory +WORKDIR /app/socs/agents/hwp_rotation/ + +## Copy this agent into the app/agents directory +COPY . . 
+ +# Run agent on container startup +ENTRYPOINT ["dumb-init", "python3", "-u", "rotation_agent.py"] + +CMD ["--site-hub=ws://crossbar:8001/ws", \ + "--site-http=http://crossbar:8001/call"] + diff --git a/agents/hwp_rotation/rotation_agent.py b/agents/hwp_rotation/rotation_agent.py new file mode 100644 index 000000000..6299dd614 --- /dev/null +++ b/agents/hwp_rotation/rotation_agent.py @@ -0,0 +1,502 @@ +import os +import argparse +import time +from twisted.internet import reactor + +from ocs import ocs_agent, site_config +from ocs.ocs_twisted import TimeoutLock + +from socs.agent.pmx import PMX, Command + +on_rtd = os.environ.get('READTHEDOCS') == 'True' +if not on_rtd: + import src.pid_controller as pd + + +class RotationAgent: + """Agent to control the rotation speed of the CHWP + + Args: + kikusui_ip (str): IP address for the Kikusui power supply + kikusui_port (str): Port for the Kikusui power supply + pid_ip (str): IP address for the PID controller + pid_port (str): Port for the PID controller + pid_verbosity (str): Verbosity of PID controller output + + """ + + def __init__(self, agent, kikusui_ip, kikusui_port, pid_ip, pid_port, pid_verbosity): + self.agent = agent + self.log = agent.log + self.lock = TimeoutLock() + self._initialized = False + self.take_data = False + self.kikusui_ip = kikusui_ip + self.kikusui_port = int(kikusui_port) + self.pid_ip = pid_ip + self.pid_port = pid_port + self._pid_verbosity = pid_verbosity > 0 + self.cmd = None # Command object for PSU commanding + self.pid = None # PID object for pid controller commanding + + agg_params = {'frame_length': 60} + self.agent.register_feed( + 'hwprotation', record=True, agg_params=agg_params) + + @ocs_agent.param('auto_acquire', default=False, type=bool) + @ocs_agent.param('force', default=False, type=bool) + def init_connection(self, session, params): + """init_connection(auto_acquire=False, force=False) + + **Task** - Initialize connection to Kikusui Power Supply and PID + Controller. 
+ + Parameters: + auto_acquire (bool, optional): Default is False. Starts data + acquisition after initialization if True. + force (bool, optional): Force initialization, even if already + initialized. Defaults to False. + + """ + if self._initialized and not params['force']: + self.log.info("Connection already initialized. Returning...") + return True, "Connection already initialized" + + with self.lock.acquire_timeout(0, job='init_connection') as acquired: + if not acquired: + self.log.warn( + 'Could not run init_connection because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + try: + pmx = PMX(tcp_ip=self.kikusui_ip, + tcp_port=self.kikusui_port, timeout=0.5) + self.cmd = Command(pmx) + self.log.info('Connected to Kikusui power supply') + except ConnectionRefusedError: + self.log.error('Could not establish connection to Kikusui power supply') + reactor.callFromThread(reactor.stop) + return False, 'Unable to connect to Kikusui PSU' + + try: + self.pid = pd.PID(pid_ip=self.pid_ip, pid_port=self.pid_port, + verb=self._pid_verbosity) + self.log.info('Connected to PID controller') + except BrokenPipeError: + self.log.error('Could not establish connection to PID controller') + reactor.callFromThread(reactor.stop) + return False, 'Unable to connect to PID controller' + + self._initialized = True + + # Start 'iv_acq' Process if requested + if params['auto_acquire']: + self.agent.start('iv_acq') + + return True, 'Connection to PSU and PID controller established' + + def tune_stop(self, session, params): + """tune_stop() + + **Task** - Reverse the drive direction of the PID controller and + optimize the PID parameters for deceleration. 
+ + """ + with self.lock.acquire_timeout(3, job='tune_stop') as acquired: + if not acquired: + self.log.warn( + 'Could not tune stop because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.tune_stop() + + return True, 'Reversing Direction' + + def tune_freq(self, session, params): + """tune_freq() + + **Task** - Tune the PID controller setpoint to the rotation frequency + and optimize the PID parameters for rotation. + + """ + with self.lock.acquire_timeout(3, job='tune_freq') as acquired: + if not acquired: + self.log.warn( + 'Could not tune freq because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.tune_freq() + + return True, 'Tuning to setpoint' + + @ocs_agent.param('freq', default=0., check=lambda x: 0. <= x <= 3.0) + def declare_freq(self, session, params): + """declare_freq(freq=0) + + **Task** - Store the entered frequency as the PID setpoint when + ``tune_freq()`` is next called. + + Parameters: + freq (float): Desired HWP rotation frequency + + """ + with self.lock.acquire_timeout(3, job='declare_freq') as acquired: + if not acquired: + self.log.warn( + 'Could not declare freq because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.declare_freq(params['freq']) + + return True, 'Setpoint at {} Hz'.format(params['freq']) + + @ocs_agent.param('p', default=0.2, type=float, check=lambda x: 0. < x <= 8.) + @ocs_agent.param('i', default=63, type=int, check=lambda x: 0 <= x <= 200) + @ocs_agent.param('d', default=0., type=float, check=lambda x: 0. <= x < 10.) + def set_pid(self, session, params): + """set_pid(p=0.2, i=63, d=0.) + + **Task** - Set the PID parameters. Note these changes are for the + current session only and will change whenever the agent container is + reloaded. 
+ + Parameters: + p (float): Proportional PID value + i (int): Integral PID value + d (float): Derivative PID value + + """ + with self.lock.acquire_timeout(3, job='set_pid') as acquired: + if not acquired: + self.log.warn( + 'Could not set pid because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.set_pid( + [params['p'], params['i'], params['d']]) + + return True, f"Set PID params to p: {params['p']}, i: {params['i']}, d: {params['d']}" + + def get_freq(self, session, params): + """get_freq() + + **Task** - Return the current HWP frequency as seen by the PID + controller. + + """ + with self.lock.acquire_timeout(3, job='get_freq') as acquired: + if not acquired: + self.log.warn( + 'Could not get freq because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + freq = self.pid.get_freq() + + return True, 'Current frequency = {}'.format(freq) + + def get_direction(self, session, params): + """get_direction() + + **Task** - Return the current HWP tune direction as seen by the PID + controller. + + """ + with self.lock.acquire_timeout(3, job='get_direction') as acquired: + if not acquired: + self.log.warn( + 'Could not get freq because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + direction = self.pid.get_direction() + + return True, 'Current Direction = {}'.format(['Forward', 'Reverse'][direction]) + + @ocs_agent.param('direction', type=str, default='0', choices=['0', '1']) + def set_direction(self, session, params): + """set_direction(direction='0') + + **Task** - Set the HWP rotation direction. + + Parameters: + direction (str): '0' for forward and '1' for reverse. 
+ + """ + with self.lock.acquire_timeout(3, job='set_direction') as acquired: + if not acquired: + self.log.warn( + 'Could not set direction because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.set_direction(params['direction']) + + return True, 'Set direction' + + @ocs_agent.param('slope', default=1., type=float, check=lambda x: -10. < x < 10.) + @ocs_agent.param('offset', default=0.1, type=float, check=lambda x: -10. < x < 10.) + def set_scale(self, session, params): + """set_scale(slope=1, offset=0.1) + + **Task** - Set the PID's internal conversion from input voltage to + rotation frequency. + + Parameters: + slope (float): Slope of the "rotation frequency vs input voltage" + relationship + offset (float): y-intercept of the "rotation frequency vs input + voltage" relationship + + """ + with self.lock.acquire_timeout(3, job='set_scale') as acquired: + if not acquired: + self.log.warn( + 'Could not set scale because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + self.pid.set_scale(params['slope'], params['offset']) + + return True, 'Set scale' + + def set_on(self, session, params): + """set_on() + + **Task** - Turn on the Kikusui drive voltage. + + """ + with self.lock.acquire_timeout(3, job='set_on') as acquired: + if not acquired: + self.log.warn( + 'Could not set on because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + self.cmd.user_input('on') + + return True, 'Set Kikusui on' + + def set_off(self, session, params): + """set_off() + + **Task** - Turn off the Kikusui drive voltage. 
+ + """ + with self.lock.acquire_timeout(3, job='set_off') as acquired: + if not acquired: + self.log.warn( + 'Could not set off because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + self.cmd.user_input('off') + + return True, 'Set Kikusui off' + + @ocs_agent.param('volt', default=0, type=float, check=lambda x: 0 <= x <= 35) + def set_v(self, session, params): + """set_v(volt=0) + + **Task** - Set the Kikusui drive voltage. + + Parameters: + volt (float): Kikusui set voltage + + """ + with self.lock.acquire_timeout(3, job='set_v') as acquired: + if not acquired: + self.log.warn( + 'Could not set v because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + self.cmd.user_input('V {}'.format(params['volt'])) + + return True, 'Set Kikusui voltage to {} V'.format(params['volt']) + + @ocs_agent.param('volt', default=32., type=float, check=lambda x: 0. <= x <= 35.) + def set_v_lim(self, session, params): + """set_v_lim(volt=32) + + **Task** - Set the Kikusui drive voltage limit. + + Parameters: + volt (float): Kikusui limit voltage + + """ + with self.lock.acquire_timeout(3, job='set_v_lim') as acquired: + if not acquired: + self.log.warn( + 'Could not set v lim because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + print(params['volt']) + self.cmd.user_input('VL {}'.format(params['volt'])) + + return True, 'Set Kikusui voltage limit to {} V'.format(params['volt']) + + def use_ext(self, session, params): + """use_ext() + + **Task** - Set the Kikusui to use an external voltage control. Doing so + enables PID control. 
+ + """ + with self.lock.acquire_timeout(3, job='use_ext') as acquired: + if not acquired: + self.log.warn( + 'Could not use external voltage because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + self.cmd.user_input('U') + + return True, 'Set Kikusui voltage to PID control' + + def ign_ext(self, session, params): + """ign_ext() + + **Task** - Set the Kiksui to ignore external voltage control. Doing so + disables the PID and switches to direct control. + + """ + with self.lock.acquire_timeout(3, job='ign_ext') as acquired: + if not acquired: + self.log.warn( + 'Could not ignore external voltage because {} is already running'.format(self.lock.job)) + return False, 'Could not acquire lock' + + time.sleep(1) + self.cmd.user_input('I') + + return True, 'Set Kikusui voltage to direct control' + + @ocs_agent.param('test_mode', default=False, type=bool) + def iv_acq(self, session, params): + """iv_acq(test_mode=False) + + **Process** - Start Kikusui data acquisition. + + Parameters: + test_mode (bool, optional): Run the Process loop only once. + This is meant only for testing. Default is False. + + Notes: + The most recent data collected is stored in the session data in the + structure:: + + >>> response.session['data'] + {'kikusui_volt': 0, + 'kikusui_curr': 0, + 'last_updated': 1649085992.719602} + + """ + with self.lock.acquire_timeout(timeout=0, job='iv_acq') as acquired: + if not acquired: + self.log.warn('Could not start iv acq because {} is already running' + .format(self.lock.job)) + return False, 'Could not acquire lock' + + session.set_status('running') + last_release = time.time() + self.take_data = True + + while self.take_data: + # Relinquish sampling lock occasionally. 
+ if time.time() - last_release > 1.: + last_release = time.time() + if not self.lock.release_and_acquire(timeout=10): + self.log.warn(f"Failed to re-acquire sampling lock, " + f"currently held by {self.lock.job}.") + continue + + data = {'timestamp': time.time(), + 'block_name': 'HWPKikusui_IV', 'data': {}} + + v_msg, v_val = self.cmd.user_input('V?') + i_msg, i_val = self.cmd.user_input('C?') + + data['data']['kikusui_volt'] = v_val + data['data']['kikusui_curr'] = i_val + + self.agent.publish_to_feed('hwprotation', data) + + session.data = {'kikusui_volt': v_val, + 'kikusui_curr': i_val, + 'last_updated': time.time()} + + time.sleep(1) + + if params['test_mode']: + break + + self.agent.feeds['hwprotation'].flush_buffer() + return True, 'Acqusition exited cleanly' + + def _stop_iv_acq(self, session, params): + """ + Stop iv_acq process. + """ + if self.take_data: + self.take_data = False + return True, 'requested to stop taking data' + + return False, 'acq is not currently running' + + +def make_parser(parser=None): + """ + Build the argument parser for the Agent. 
Allows sphinx to automatically build documentation + based on this function + """ + if parser is None: + parser = argparse.ArgumentParser() + + # Add options specific to this agent + pgroup = parser.add_argument_group('Agent Options') + pgroup.add_argument('--kikusui-ip') + pgroup.add_argument('--kikusui-port') + pgroup.add_argument('--pid-ip') + pgroup.add_argument('--pid-port') + pgroup.add_argument('--verbose', '-v', action='count', default=0, + help='PID Controller verbosity level.') + pgroup.add_argument('--mode', type=str, default='iv_acq', + choices=['idle', 'init', 'iv_acq'], + help="Starting operation for the Agent.") + return parser + + +if __name__ == '__main__': + parser = make_parser() + args = site_config.parse_args(agent_class='RotationAgent', parser=parser) + + init_params = False + if args.mode == 'init': + init_params = {'auto_acquire': False} + elif args.mode == 'iv_acq': + init_params = {'auto_acquire': True} + + agent, runner = ocs_agent.init_site_agent(args) + rotation_agent = RotationAgent(agent, kikusui_ip=args.kikusui_ip, + kikusui_port=args.kikusui_port, + pid_ip=args.pid_ip, + pid_port=args.pid_port, + pid_verbosity=args.verbose) + agent.register_process('iv_acq', rotation_agent.iv_acq, + rotation_agent._stop_iv_acq) + agent.register_task('init_connection', rotation_agent.init_connection, + startup=init_params) + agent.register_task('tune_stop', rotation_agent.tune_stop) + agent.register_task('tune_freq', rotation_agent.tune_freq) + agent.register_task('declare_freq', rotation_agent.declare_freq) + agent.register_task('set_pid', rotation_agent.set_pid) + agent.register_task('get_freq', rotation_agent.get_freq) + agent.register_task('get_direction', rotation_agent.get_direction) + agent.register_task('set_direction', rotation_agent.set_direction) + agent.register_task('set_scale', rotation_agent.set_scale) + agent.register_task('set_on', rotation_agent.set_on) + agent.register_task('set_off', rotation_agent.set_off) + 
agent.register_task('set_v', rotation_agent.set_v) + agent.register_task('set_v_lim', rotation_agent.set_v_lim) + agent.register_task('use_ext', rotation_agent.use_ext) + agent.register_task('ign_ext', rotation_agent.ign_ext) + + runner.run(agent, auto_reconnect=True) diff --git a/agents/hwp_rotation/src/__init__.py b/agents/hwp_rotation/src/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/agents/hwp_rotation/src/pid_controller.py b/agents/hwp_rotation/src/pid_controller.py new file mode 100644 index 000000000..76453e502 --- /dev/null +++ b/agents/hwp_rotation/src/pid_controller.py @@ -0,0 +1,327 @@ +import time +import socket + + +class PID: + """Class to communicate with the Omega CNi16D54-EIT PID controller. + + Args: + pid_ip (str): IP address for the controller. + pid_port (int): Port number for the socket connection. + verb (bool): Verbose output setting. Defaults to False. + + Attributes: + verb (bool): Verbose output setting. + hex_freq (str): Currently declared rotation frequency in hexadecimal. + direction (int): Current direction of the HWP. 0 for forward and 1 for + backwards. + conn (socket.socket): Socket object with open connection to the PID + controller. + + """ + def __init__(self, pid_ip, pid_port, verb=False): + self.verb = verb + self.hex_freq = '00000' + self.direction = None + # Need to setup connection before setting direction + self.conn = self._establish_connection(pid_ip, int(pid_port)) + self.set_direction('0') + + @staticmethod + def _establish_connection(ip, port, timeout=5): + """Connect to PID controller. + + Args: + ip (str): IP address for controller. + port (int): Port number for socket connection. + timeout (float): Time in seconds to wait for comms until timeout + occurs. + + Returns: + socket.socket: Socket object with open connection to the PID + controller. 
+ + """ + conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + conn.connect((ip, port)) + except ConnectionRefusedError: + print(f"Failed to connect to device at {ip}:{port}") + conn.settimeout(timeout) + + return conn + + @staticmethod + def _convert_to_hex(value, decimal): + """Converts the user input into a format the PID controller can + read. + + """ + temp_value = hex(int(10**decimal * float(value))) + return ('0000' + str(temp_value)[2:].upper())[-4:] + + @staticmethod + def _get_scale_hex(num, corr): + """Gets the exponent in scientific notation.""" + expo = int(str(num * 10**40 + 0.01).split('+')[1]) - 40 + digits = round(num * 10**(-expo + 4)) + + expo_hex = str(hex(corr - expo))[2:] + digits_hex = ('00000' + str(hex(digits))[2:])[-5:] + + return expo_hex + digits_hex + + ###################################################################### + # Main Processes + ###################################################################### + + def set_direction(self, direction): + """Sets the direction if the CHWP. + + Modifies the ``direction`` attribute. + + Args: + direction (int): 0 for forward and 1 for backwards + + """ + if direction == '0': + if self.verb: + print('Forward') + resp = self.send_message("*W02400000") + self.direction = 0 + elif direction == '1': + if self.verb: + print('Reverse') + resp = self.send_message("*W02401388") + self.direction = 1 + + if self.verb: + print(self.return_messages([resp])[0]) + + def declare_freq(self, freq): + """Declare to memory what the CHWP frequency should be. + + **Note:** Does not actually change the frequency. + + Args: + freq (float): Set freqency in Hz. Must be less than 3.5. 
+ + """ + if float(freq) <= 3.5: + self.hex_freq = '0' + self._convert_to_hex(freq, 3) + if self.verb: + print('Frequency Setpoint = ' + str(freq) + ' Hz') + else: + if self.verb: + print('Invalid Frequency') + + def tune_stop(self): + """Set the setpoint to 0 Hz and stop the CHWP.""" + if self.verb: + print('Starting Stop') + + responses = [] + responses.append(self.send_message("*W0C83")) + responses.append(self.send_message("*W01400000")) + responses.append(self.send_message("*R01")) + responses.append(self.send_message("*Z02")) + if self.verb: + print(responses) + print(self.return_messages(responses)) + + stop_params = [0.2, 0, 0] + self.set_pid(stop_params) + + def tune_freq(self): + """Set the setpoint to currently declared frequency. + + To declare the frequency, use ``PID.declare_freq()``. + + """ + if self.verb: + print('Starting Tune') + + responses = [] + responses.append(self.send_message("*W0C81")) + responses.append(self.send_message(f"*W014{self.hex_freq}")) + responses.append(self.send_message("*R01")) + responses.append(self.send_message("*Z02")) + if self.verb: + print(responses) + print(self.return_messages(responses)) + + tune_params = [0.2, 63, 0] + self.set_pid(tune_params) + + def get_freq(self): + """Returns the current frequency of the CHWP.""" + if self.verb: + print('Finding CHWP Frequency') + + responses = [] + responses.append(self.send_message("*X01")) + if self.verb: + print(responses) + + freq = self.return_messages(responses)[0] + return freq + + def get_direction(self): + """Get the current rotation direction. 
+ + Returns: + int: 0 for forward and 1 for backwards + + """ + if self.verb: + print('Finding CHWP Direction') + + responses = [] + responses.append(self.send_message("*R02")) + + direction = self.return_messages(responses)[0] + if direction == 1: + print('Direction = Reverse') + elif direction == 0: + print('Direction = Forward') + self.direction = direction + + return direction + + def set_pid(self, params): + """Sets the PID parameters of the controller.""" + if self.verb: + print('Setting PID Params') + + p_value = self._convert_to_hex(params[0], 3) + i_value = self._convert_to_hex(params[1], 0) + d_value = self._convert_to_hex(params[2], 1) + + responses = [] + responses.append(self.send_message(f"*W17{p_value}")) + responses.append(self.send_message(f"*W18{i_value}")) + responses.append(self.send_message(f"*W19{d_value}")) + responses.append(self.send_message("*Z02")) + if self.verb: + print(responses) + print(self.return_messages(responses)) + + def set_scale(self, slope, offset): + """Set the conversion between feedback voltage and approximate + frequency. + + """ + slope_hex = self._get_scale_hex(slope, 1) + offset_hex = self._get_scale_hex(offset, 2) + + responses = [] + responses.append(self.send_message(f"*W14{slope_hex}")) + responses.append(self.send_message(f"*W03{offset_hex}")) + responses.append(self.send_message("*Z02")) + if self.verb: + print(responses) + print(self.return_messages(responses)) + + ###################################################################### + # Messaging + ###################################################################### + + def send_message(self, msg): + """Send message over TCP to the PID controller. + + Args: + msg (str): Command to send to the controller. + + Returns: + str: Respnose from the controller. 
+ + """ + self.conn.sendall((msg + '\r\n').encode()) + time.sleep(0.5) # Don't send messages too quickly + for attempt in range(2): + try: + data = self.conn.recv(4096).decode().strip() + break + except socket.timeout: + print("Caught timeout waiting for response from PID controller. " + + "Trying again...") + time.sleep(1) + if attempt == 1: + raise RuntimeError( + 'Response from PID controller timed out.') + return data + + def return_messages(self, msg): + """Decode list of responses from PID controller and return useful + values. + + Args: + msg (list): List of messages to decode. + + Returns: + list: Decoded responses. + + """ + return self._decode_array(msg) + + @staticmethod + def _decode_array(input_array): + output_array = list(input_array) + + for index, string in enumerate(list(input_array)): + header = string[0] + + if header == 'R': + output_array[index] = PID._decode_read(string) + elif header == 'W': + output_array[index] = PID._decode_write(string) + elif header == 'E': + output_array[index] = 'PID Enabled' + elif header == 'D': + output_array[index] = 'PID Disabled' + elif header == 'P': + pass + elif header == 'G': + pass + elif header == 'X': + output_array[index] = PID._decode_measure(string) + else: + pass + + return output_array + + @staticmethod + def _decode_read(string): + read_type = string[1:3] + if read_type == '01': + return 'Setpoint = ' + str(int(string[4:], 16) / 1000.) + # Decode direction + if read_type == '02': + if int(string[4:], 16) / 1000. 
> 2.5: + print('Direction = Reverse') + return 1 + else: + print('Direction = Forward') + return 0 + else: + return 'Unrecognized Read' + + @staticmethod + def _decode_write(string): + write_type = string[1:] + if write_type == '01': + return 'Changed Setpoint' + if write_type == '02': + return 'Changed Direction' + if write_type == '0C': + return 'Changed Action Type' + else: + return 'Unrecognized Write' + + @staticmethod + def _decode_measure(string): + measure_type = string[1:3] + if measure_type == '01': + return float(string[3:]) + else: + return 9.999 diff --git a/agents/hwp_sim/hwp_simulator_agent.py b/agents/hwp_sim/hwp_simulator_agent.py deleted file mode 100644 index 1beeb7a5d..000000000 --- a/agents/hwp_sim/hwp_simulator_agent.py +++ /dev/null @@ -1,125 +0,0 @@ -import argparse - -from ocs import ocs_agent, site_config, client_t -# import random -import time -import threading -import serial -# import os, sys -from ocs.ocs_twisted import TimeoutLock - -from autobahn.wamp.exception import ApplicationError - -class HWPSimulator: - def __init__(self, port='/dev/ttyACM0', baud=9600, timeout=0.1): - self.com = serial.Serial(port=port, baudrate=baud, timeout=timeout) - - def read(self): - """ - Reads data from an Arduino. First decodes the read data, then splits - the read line to remove doubled values and takes the second one. 
- """ - try: - data = bytes.decode(self.com.readline()[:-2]) - sin_data = float(data.split(' ')[1]) - return sin_data - except Exception as e: - print(e) - - -class HWPSimulatorAgent: - - def __init__(self, agent, port='/dev/ttyACM0'): - self.active = True - self.agent = agent - self.log = agent.log - self.lock = TimeoutLock() - self.port = port - self.take_data = False - self.arduino = HWPSimulator(port=self.port) - - self.initialized = False - - agg_params = {'frame_length': 60} - self.agent.register_feed('amplitudes', record=True, agg_params=agg_params,buffer_time=1) - - - def init_arduino(self): - """ - Initializes the Arduino connection. - """ - if self.initialized: - return True, "Already initialized." - - with self.lock.acquire_timeout(timeout=0, job='init') as acquired: - if not acquired: - self.log.warn("Could not start init because {} is already running".format(self.lock.job)) - return False, "Could not acquire lock." - try: - self.arduino.read() - except ValueError: - pass - print("Arduino HWP Simulator initialized.") - - self.initialized = True - return True, 'Arduino HWP Simulator initialized.' - - - def start_acq(self, session, params): - """Starts acquiring data. - - Args: - sampling_frequency (float): - Sampling frequency for data collection. Defaults to 2.5 Hz - - """ - f_sample = params.get('sampling_frequency', 2.5) - sleep_time = 1/f_sample - 0.1 - - if not self.initialized: - self.init_arduino() - - with self.lock.acquire_timeout(timeout=0, job='acq') as acquired: - if not acquired: - self.log.warn("Could not start acq because {} is already running".format(self.lock.job)) - return False, "Could not acquire lock." 
- - session.set_status('running') - - self.take_data = True - - while self.take_data: - data = {'timestamp':time.time(), 'block_name':'amps','data':{}} - - data['data']['amplitude'] = self.arduino.read() - time.sleep(sleep_time) - self.agent.publish_to_feed('amplitudes',data) - - self.agent.feeds['amplitudes'].flush_buffer() - - return True, 'Acquisition exited cleanly.' - - def stop_acq(self, session, params=None): - """ - Stops the data acquisiton. - """ - if self.take_data: - self.take_data = False - return True, 'requested to stop taking data.' - else: - return False, 'acq is not currently running.' - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - args = site_config.parse_args(agent_class='HWPSimulatorAgent', - parser=parser) - - agent, runner = ocs_agent.init_site_agent(args) - - arduino_agent = HWPSimulatorAgent(agent) - - agent.register_task('init_arduino', arduino_agent.init_arduino) - agent.register_process('acq', arduino_agent.start_acq, arduino_agent.stop_acq, startup=True) - - runner.run(agent, auto_reconnect=True) diff --git a/agents/labjack/labjack_agent.py b/agents/labjack/labjack_agent.py index 3eabb308d..e52a26d5d 100644 --- a/agents/labjack/labjack_agent.py +++ b/agents/labjack/labjack_agent.py @@ -367,13 +367,15 @@ def _stop_acq(self, session, params=None): else: return False, 'acq is not currently running' - def start_acq_reg(self, session, params=None): - """ - Task to start data acquisition when you want to read out - non-standard registers. In particular the custom registers - labjack has built for reading out thermocouples. Maximum is - about 2.5 Hz but is set by the register read time which is - estimated at the beginning of the acq_reg setup step. + def acq_reg(self, session, params=None): + """acq_reg(sampling_frequency=2.5) + + **Task** - Start data acquisition when you want to read out + non-standard registers. + + In particular the custom registers labjack was built for reading out + thermocouples. 
Maximum is about 2.5 Hz but is set by the register read + time which is estimated at the beginning of the acq_reg setup step. Args: sampling_frequency (float): @@ -521,7 +523,7 @@ def make_parser(parser=None): agent.register_task('init_labjack', sensors.init_labjack, startup=init_params) - agent.register_process('acq', sensors.start_acq, sensors.stop_acq) - agent.register_process('acq_reg', sensors.start_acq_reg, - sensors.stop_acq) + agent.register_process('acq', sensors.acq, sensors._stop_acq) + agent.register_process('acq_reg', sensors.acq_reg, + sensors._stop_acq) runner.run(agent, auto_reconnect=True) diff --git a/agents/lakeshore372/LS372_agent.py b/agents/lakeshore372/LS372_agent.py index 53809308a..62a06c909 100644 --- a/agents/lakeshore372/LS372_agent.py +++ b/agents/lakeshore372/LS372_agent.py @@ -5,6 +5,7 @@ import numpy as np import txaio import threading +import yaml from contextlib import contextmanager from twisted.internet import reactor @@ -115,7 +116,7 @@ def __init__(self, agent, name, ip, fake_data=False, dwell_time_delay=0, self.agent = agent # Registers temperature feeds agg_params = { - 'frame_length': 10*60 #[sec] + 'frame_length': 10*60 # [sec] } self.agent.register_feed('temperatures', record=True, @@ -145,8 +146,10 @@ def disable_control_chan(self, session, params=None): @ocs_agent.param('auto_acquire', default=False, type=bool) @ocs_agent.param('acq_params', type=dict, default=None) @ocs_agent.param('force', default=False, type=bool) + @ocs_agent.param('configfile', type=str, default=None) def init_lakeshore(self, session, params=None): - """init_lakeshore(auto_acquire=False, acq_params=None, force=False) + """init_lakeshore(auto_acquire=False, acq_params=None, force=False, + configfile=None) **Task** - Perform first time setup of the Lakeshore 372 communication. @@ -157,11 +160,12 @@ def init_lakeshore(self, session, params=None): auto_acquire is True. force (bool, optional): Force initialization, even if already initialized. 
Defaults to False. + configfile (str, optional): .yaml file for initializing 372 channel + settings """ if params is None: params = {} - if self.initialized and not params.get('force', False): self.log.info("Lakeshore already initialized. Returning...") return True, "Already initialized" @@ -203,6 +207,10 @@ def init_lakeshore(self, session, params=None): self.initialized = True + if params.get('configfile') is not None: + self.input_configfile(session, params) + session.add_message("Lakeshore initial configurations uploaded using: %s"%params['configfile']) + # Start data acquisition if requested if params.get('auto_acquire', False): self.agent.start('acq', params.get('acq_params', None)) @@ -773,7 +781,7 @@ def servo_to_temperature(self, session, params): # Make sure we aren't servoing too high in temperature. if params["temperature"] > 1: - return False, f'Servo temperature is set above 1K. Aborting.' + return False, 'Servo temperature is set above 1K. Aborting.' self.module.sample_heater.set_setpoint(params["temperature"]) @@ -912,7 +920,7 @@ def set_heater_output(self, session, params=None): session.app.publish_to_feed('temperatures', data) return True, "Set {} display to {}, output to {}".format(heater, display, output) - + @ocs_agent.param('output', type=float, check=lambda x: 0 <= x <= 100) def set_still_output(self, session, params=None): """set_still_output(output=None) @@ -977,6 +985,90 @@ def get_still_output(self, session, params=None): return True, "Current still output is {}".format(still_output) + @ocs_agent.param('configfile', type=str) + def input_configfile(self, session, params=None): + """input_configfile(configfile=None) + + **Task** - Upload 372 configuration file to initialize channel/device + settings. 
+ + Parameters: + configfile (str): name of .yaml config file + + """ + with self._lock.acquire_timeout(job='input_configfile') as acquired: + if not acquired: + self.log.warn(f"Could not start Task because " + f"{self._lock.job} is already running") + return False, "Could not acquire lock" + + # path to configfile in docker container + configpath = os.environ.get("OCS_CONFIG_DIR", "/config/") + configfile = params['configfile'] + + ls372configs = os.path.join(configpath, configfile) + with open(ls372configs) as f: + config = yaml.safe_load(f) + + ls = self.module + ls_serial = ls.id.split(',')[2] + + device_config = config[ls_serial]['device_settings'] + ls_chann_settings = config[ls_serial]['channel'] + + session.set_status('running') + + # enable/disable autoscan + if device_config['autoscan'] == 'on': + ls.enable_autoscan() + self.log.info("autoscan enabled") + elif device_config['autoscan'] == 'off': + ls.disable_autoscan() + self.log.info("autoscan disabled") + + for i in ls_chann_settings: + # enable/disable channel + if ls_chann_settings[i]['enable'] == 'on': + ls.channels[i].enable_channel() + self.log.info("CH.{channel} enabled".format(channel=i)) + elif ls_chann_settings[i]['enable'] == 'off': + ls.channels[i].disable_channel() + self.log.info("CH.{channel} disabled".format(channel=i)) + + # autorange + if ls_chann_settings[i]['autorange'] == 'on': + ls.channels[i].enable_autorange() + self.log.info("autorange on") + elif ls_chann_settings[i]['autorange'] == 'off': + ls.channels[i].disable_autorange() + self.log.info("autorange off") + + excitation_mode = ls_chann_settings[i]['excitation_mode'] + ls.channels[i].set_excitation_mode(excitation_mode) + self.log.info("excitation mode for CH.{channel} set to {exc_mode}".format(channel=i, exc_mode=excitation_mode)) + + excitation_value = ls_chann_settings[i]['excitation_value'] + ls.channels[i].set_excitation(excitation_value) + self.log.info("excitation for CH.{channel} set to {exc}".format(channel=i, 
exc=excitation_value)) + + dwell = ls_chann_settings[i]['dwell'] + ls.channels[i].set_dwell(dwell) + self.log.info("dwell for CH.{channel} is set to {dwell}".format(channel=i, dwell=dwell)) + + pause = ls_chann_settings[i]['pause'] + ls.channels[i].set_pause(pause) + self.log.info("pause for CH.{channel} is set to {pause}".format(channel=i, pause=pause)) + + calibration_curvenum = ls_chann_settings[i]['calibration_curve_num'] + ls.channels[i].set_calibration_curve(calibration_curvenum) + self.log.info("calibration curve for CH.{channel} set to {cal_curve}".format(channel=i, cal_curve=calibration_curvenum)) + tempco = ls_chann_settings[i]['temperature_coeff'] + ls.channels[i].set_temperature_coefficient(tempco) + self.log.info("temperature coeff. for CH.{channel} set to {tempco}".format(channel=i, tempco=tempco)) + + return True, "Uploaded {}".format(configfile) + + def make_parser(parser=None): """Build the argument parser for the Agent. Allows sphinx to automatically build documentation based on this function. 
@@ -1007,9 +1099,11 @@ def make_parser(parser=None): help='Record sample heater output during acquisition.') pgroup.add_argument('--enable-control-chan', action='store_true', help='Enable reading of the control input each acq cycle') + pgroup.add_argument('--configfile', type=str, help='Yaml file for initializing 372 settings') return parser + if __name__ == '__main__': # For logging txaio.use_twisted() @@ -1025,7 +1119,8 @@ def make_parser(parser=None): init_params = False if args.mode == 'init': init_params = {'auto_acquire': False, - 'acq_params': {'sample_heater': args.sample_heater}} + 'acq_params': {'sample_heater': args.sample_heater}, + 'configfile': args.configfile} elif args.mode == 'acq': init_params = {'auto_acquire': True, 'acq_params': {'sample_heater': args.sample_heater}} @@ -1062,5 +1157,6 @@ def make_parser(parser=None): agent.register_process('acq', lake_agent.acq, lake_agent._stop_acq) agent.register_task('enable_control_chan', lake_agent.enable_control_chan) agent.register_task('disable_control_chan', lake_agent.disable_control_chan) + agent.register_task('input_configfile', lake_agent.input_configfile) runner.run(agent, auto_reconnect=True) diff --git a/agents/magpie/Dockerfile b/agents/magpie/Dockerfile new file mode 100644 index 000000000..32c02e02c --- /dev/null +++ b/agents/magpie/Dockerfile @@ -0,0 +1,12 @@ +# Use socs base image +FROM socs:latest + +RUN pip3 install pandas + +# Set the working directory to registry directory +WORKDIR /app/socs/agents/magpie +COPY . . 
+ + +# Run registry on container startup +ENTRYPOINT ["dumb-init", "python3", "-u", "magpie_agent.py"] diff --git a/agents/magpie/magpie_agent.py b/agents/magpie/magpie_agent.py new file mode 100644 index 000000000..efb4b90e8 --- /dev/null +++ b/agents/magpie/magpie_agent.py @@ -0,0 +1,894 @@ +import argparse +import so3g +from spt3g import core +import txaio +import os +import numpy as np +import yaml +import ast +from scipy import signal +import queue +import time +from ocs import ocs_agent, site_config + +MAX_CHANS = 4096 + + +# Map from primary key-names to their index in the SuperTimestream +# This will be populated when the first frame comes in +primary_idxs = {} + + +def load_frame_data(frame): + """ + Returns detector data from a G3Stream. + + Returns: + times : np.ndarray + Array with shape (nsamps) of timestamps (sec) + data : np.ndarray + Array with shape (nchans, nsamps) of detector phase data (phi0) + """ + primary = frame['primary'] + if isinstance(primary, core.G3TimesampleMap): + times = np.array(primary['UnixTime']) / 1e9 + else: + if not primary_idxs: + for i, name in enumerate(frame['primary'].names): + primary_idxs[name] = i + times = np.array(primary.data[primary_idxs['UnixTime']]) / 1e9 + + d = frame['data'] + if isinstance(d, core.G3TimestreamMap): + nchans, nsamps = len(d), d.n_samples + data = np.ndarray((nchans, nsamps), dtype=np.float32) + for i in range(nchans): + data[i] = d[f'r{i:0>4}'] * (2*np.pi) / 2**16 + + else: # G3SuperTimestream probably + data = d.data * (2*np.pi) / 2**16 + + return times, data + + +def sleep_while_running(duration, session, interval=1): + """ + Sleeps for a certain duration as long as a session object's status is + 'starting' or 'running'. If the session is changed to 'stopping', + this will quickly return False. If the sleep is completed without + interruption this will return True. + + Args + ---- + duration : float + Amount of time (sec) to sleep for. 
+ session : OpSessions + Session whose status should be monitored + interval : float, optional + Polling interval (sec) to check the session status. Defaults to 1 sec. + """ + end_time = time.time() + duration + while session.status in ['starting', 'running']: + now = time.time() + if now >= end_time: + return True + time.sleep(min(interval, end_time - now)) + return False + + +class FIRFilter: + """ + Class for Finite Input Response filter. Filter phases are preserved between + `lfilt` calls so you can filter frame-based data. + """ + def __init__(self, b, a, nchans=None): + if nchans is None: + nchans = MAX_CHANS + self.b = b + self.a = a + self.z = np.zeros((nchans, len(b)-1)) + + def lfilt(self, data): + """Filters data in place""" + n = len(data) + data[:, :], self.z[:n] = signal.lfilter(self.b, self.a, data, axis=1,) + + @classmethod + def butter_highpass(cls, cutoff, fs, order=5): + """ + Creates an highpass butterworth FIR filter + + Args + ---- + cutoff : float + Cutoff freq (Hz) + fs : float + sample frequency (Hz) + order : int + Order of the filter + """ + nyq = 0.5 * fs + normal_cutoff = cutoff / nyq + b, a = signal.butter(order, normal_cutoff, btype='high', analog=False) + return cls(b, a) + + @classmethod + def butter_lowpass(cls, cutoff, fs, order=5): + """ + Creates an lowpass butterworth FIR filter + + Args + ---- + cutoff : float + Cutoff freq (Hz) + fs : float + sample frequency (Hz) + order : int + Order of the filter + """ + nyq = 0.5 * fs + normal_cutoff = cutoff / nyq + b, a = signal.butter(order, normal_cutoff, btype='low', analog=False) + return cls(b, a) + + +class RollingAvg: + """ + Class for calculating rolling averages across multiple frames. Useful + for simple filtering and calculating the rms. + + Args + ----- + N (int): + Number of samples to average together + nchans (int): + Number of channels this will be operating on. 
+ """ + def __init__(self, N, nchans): + self.N = N + self.nchans = nchans + self.rollover = np.zeros((nchans, N)) + + def apply(self, data): + """ + Apply rolling avg to data array. + + Args + ----- + data (np.ndarray): + Array of data. Must be of shape (nchans, nsamps) + """ + nsamps = data.shape[1] + summed = 1./self.N * np.cumsum( + np.concatenate((self.rollover, data), axis=1), axis=1) + if self.N <= nsamps: + # Replace entire rollover array + self.rollover = data[:, -self.N:] + else: + # Roll back and just update with samples available + self.rollover = np.roll(self.rollover, -nsamps) + self.rollover[:, -nsamps:] = data[:, :] + + return summed[:, self.N:] - summed[:, :-self.N] + + +class FocalplaneConfig: + def __init__(self): + """ + Object to configure the focal-plane layout. + + Attributes + ------------- + chan_mask : np.ndarray + Map from absolute_smurf_chan --> Visual Element index + """ + self.num_dets = 0 + self.xs = [] + self.ys = [] + self.rots = [] + self.cnames = [] + self.templates = [] + self.value_names = [] + self.eqs = [] + self.eq_labels = [] + self.eq_color_is_dynamic = [] + self.cmaps = [] + self.chan_mask = np.full(MAX_CHANS, -1) + + def config_frame(self): + """ + Generates a config frame for lyrebird + """ + frame = core.G3Frame(core.G3FrameType.Wiring) + frame['x'] = core.G3VectorDouble(self.xs) + frame['y'] = core.G3VectorDouble(self.ys) + frame['cname'] = core.G3VectorString(self.cnames) + frame['rotation'] = core.G3VectorDouble(self.rots) + frame['templates'] = core.G3VectorString(self.templates) + frame['values'] = core.G3VectorString(self.value_names) + frame['color_is_dynamic'] = core.G3VectorBool(self.eq_color_is_dynamic) + frame['equations'] = core.G3VectorString(self.eqs) + frame['eq_labels'] = core.G3VectorString(self.eq_labels) + frame['cmaps'] = core.G3VectorString(self.cmaps) + return frame + + def add_vis_elem(self, name, x, y, rot, value_names, eqs, eq_labels, cmaps, + template, abs_smurf_chan, eq_color_is_dynamic): + 
""" + Adds a visual element to the focal-plane. + + Args + ---- + name : str + Name of the channel + x : float + x-coord of the element + y : float + y-coord of the element + rot : float + Rotation angle of the element (rads) + value_names : List[str] + List containing the names of the data values corresponding to this + visual element. All vis-elems must have the same number of + data-values. Data-values must be unique, such as + ``/rms`` + eqs : List[str] + List of equations to be displayed by the visual element. Equations + are written in Polish Notation, and may contain a combination of + numbers and data-values. Data-values can be global as defined in + the lyrebird config file, or channel values as defined in the + ``value_names`` argument. There must be the same number of eqs per + visual element. + + Polish notation is a method of writing equations where operators + precede the operands, making it easier to parse and evaluate. For + example, the operation :math:`a + b` will be ``+ a b`` in polish + notation, and :math:`(a + b) / 2` can be written as ``/ + a b 2``. + See the magpie docs page for a full list of operators that are + accepted by lyrebird. + eq_labels : List[str] + List of strings used to label equations in the lyrebird gui. This + list must have the same size as the ``eqs`` array. + cmaps : List[str] + List of colormaps to use for each equation. This must be the same + size as the ``eqs`` array. + template : str + Template used for this vis-elem. Templates are defined in the + lyrebird cfg file. + abs_smurf_chan : int + Absolute smurf-channel corresponding to this visual element. + eq_color_is_dynamic : List[bool] + List of booleans that determine if each equation's color-scale is + dynamic or fixed. This must be the same size as the ``eqs`` array. 
+ """ + self.cnames.append(name) + self.xs.append(float(x)) + self.ys.append(float(y)) + self.rots.append(rot) + self.templates.append(template) + self.value_names.extend(value_names) + self.cmaps.extend(cmaps) + self.eqs.extend(eqs) + self.eq_labels.extend(eq_labels) + self.eq_color_is_dynamic.extend(eq_color_is_dynamic) + self.chan_mask[abs_smurf_chan] = self.num_dets + self.num_dets += 1 + + @classmethod + def grid(cls, stream_id, xdim, ydim, ygap=0, offset=(0., 0.)): + """ + Creates a FocalplaneConfig object for a grid of channels. + + Args + ---- + stream_id : str + Stream-id for the magpie agent. This will be prepended to all + lyrebird data-val names. + xdim : int + Number of channels in the x-dim of the grid + ydim : int + Number of channels in the y-dim of the grid + ygap : int + A small gap will be added every ``ygap`` rows, to better organize + channels. + offset : Tuple(float, float) + Global offset of the grid with respect to the lyrebird + coordinate-system + """ + fp = cls() + xs, ys = np.arange(xdim), np.arange(ydim) + xs = xs + offset[0] + ys = ys + offset[1] + + # Adds gaps every ygap rows + if ygap > 0: + ys = ys + .5 * (ys // ygap) + + template = 'box' + cmaps = ['red_cmap', 'blue_cmap'] + eq_color_is_dynamic = [True, False] + for i in range(xdim * ydim): + x, y = xs[i % xdim], ys[i // xdim] + name = f"{stream_id}/channel_{i}" + value_names = [f'{name}/raw', f'{name}/rms'] + eqs = [f'{name}/raw', f'* {name}/rms rms_scale'] + eq_labels = ['raw', 'rms'] + fp.add_vis_elem(name, x, y, 0, value_names, eqs, eq_labels, cmaps, + template, i, eq_color_is_dynamic) + + return fp + + @classmethod + def from_csv(cls, stream_id, detmap_file, wafer_scale=1., offset=(0, 0)): + """ + Creates a FocalplaneConfig object from a detmap csv file. + + Args + ----- + stream_id : str + Stream-id for the magpie agent. This will be prepended to all + lyrebird data-val names. + detmap_file : str + Path to detmap csv file. 
+ wafer_scale : int + Scalar to multiply against det x and y positions when translating + to lyrebird positions. Defaults to 1, meaning that the lyrebird + coordinate system will be the same as the det-map coordinate system, + so x and y will be in um. + offset : Tuple(float, float) + Global offset of the grid with respect to the lyrebird + coordinate-system. If wafer_scale is 1, this should be in um. + """ + import pandas as pd + fp = cls() + df = pd.read_csv(detmap_file) + + cmaps = [ + ['red_cmap', 'blue_cmap'], + ['blue_cmap', 'red_cmap'], + ] + templates = ["template_c0_p0", "template_c1_p0",] + + color_idxs = {} + ncolors = 0 + for i, row in df.iterrows(): + rot = 0 + try: + bandpass = int(row['bandpass']) + if bandpass in color_idxs: + cidx = color_idxs[bandpass] + else: + color_idxs[bandpass] = ncolors + ncolors += 1 + cidx = color_idxs[bandpass] + + template = templates[cidx] + if row['pol'].strip() == 'B': + rot = np.pi / 2 + except ValueError: + # Just skip detectors with unknown bandpass + continue + x, y = row['det_x'] * wafer_scale, row['det_y'] * wafer_scale + x += offset[0] + y += offset[1] + + name = f"{stream_id}/det_{i}" + eqs = [f'{name}/raw', f'* {name}/rms rms_scale'] + value_names = [f'{name}/raw', f'{name}/rms'] + eq_labels = ['raw', 'rms'] + + fp.add_vis_elem(name, x, y, rot, value_names, eqs, eq_labels, + cmaps[cidx], template, i, [True, False]) + + return fp + + +class MagpieAgent: + """ + Agent for processing streamed G3Frames, and sending data to lyrebird. + + Attributes + ----------- + target_rate : float + This is the target sample rate of data to be sent to lyrebird. + Incoming data will be downsampled to this rate before being sent out. + fp : FocalplaneConfig + This is the FocalplaneConfig object that contains info about what + channels are present in the focal-plane representation, and their + positions. + mask : np.ndarray + This is a channel mask that maps readout channel to + absolute-smurf-chan. 
Before a status frame containing the ChannelMask + is seen in the G3Stream, this defaults to being an identity mapping + which just sends the readout channel no. to itself. Once a status + frame with the channel mask is seen, this will be updated + out_queue : Queue + This is a queue containing outgoing G3Frames to be sent to lyrebird. + delay : float + The outgoing stream will attempt to enforce this delay between the + relative timestamps in the G3Frames and the real time to ensure a + smooth flow of data. This must be greater than the frame-aggregation + time of the SmurfStreamer or else lyrebird will update data in spurts. + avg1, avg2 : RollingAvg + Two Rolling Averagers which are used to calculate the rolling RMS data. + monitored_channels : list + List of monitored channels whose data should be sent to grafana. + This list will contain entries which look like + ``(readout_chan_number, field_name)``. + monitored_chan_sample_rate : float + Sample rate (Hz) to target when downsampling monitored channel data for + grafana. 
+ """ + mask_register = 'AMCc.SmurfProcessor.ChannelMapper.Mask' + + def __init__(self, agent, args): + self.agent: ocs_agent.OCSAgent = agent + self.log = self.agent.log + self._running = False + + self.target_rate = args.target_rate + layout = args.layout.lower() + if layout == 'grid': + self.fp = FocalplaneConfig.grid( + args.stream_id, args.xdim, args.ydim, ygap=8, offset=args.offset + ) + elif layout == 'wafer': + if args.det_map is not None: + self.fp = FocalplaneConfig.from_csv( + args.stream_id, args.det_map, wafer_scale=args.wafer_scale, offset=args.offset + ) + else: + raise ValueError("CSV file must be set using the det-map arg if " + "using wafer layout") + + self.mask = np.arange(MAX_CHANS) + self.out_queue = queue.Queue(1000) + self.delay = args.delay + + self.avg1, self.avg2 = None, None + + self.monitored_channels = [] + self.monitored_chan_sample_rate = 10 + self.agent.register_feed( + 'detector_tods', record=True, + agg_params={'exclude_aggregator': True} + ) + + + @ocs_agent.param('target_rate', type=float) + def set_target_rate(self, session, params): + """set_target_rate(target_rate) + + Sets the target downsampled sample-rate of the data sent to lyrebird + + Args: + target_rate : float + Target sample rate for lyrebird (Hz) + """ + self.target_rate = params['target_rate'] + return True, f'Set target rate to {self.target_rate}' + + + @ocs_agent.param('delay', type=float) + def set_delay(self, session, params): + """set_delay(delay) + + Sets the target downsampled sample-rate of the data sent to lyrebird + + Args: + target_rate : float + Target sample rate for lyrebird (Hz) + """ + self.delay = params['delay'] + return True, f'Set delay param to {self.delay}' + + + @ocs_agent.param('chan_info', type=list, check=lambda x: len(x) <= 6) + @ocs_agent.param('sample_rate', type=float, default=10, + check=lambda x: 0 < x <= 20) + def set_monitored_channels(self, session, params): + """set_monitored_channels(chan_info, sample_rate=10) + + **Task** - 
Sets channels which will be monitored and have their + downsampled data sent to grafana. + + Field names can be manually specified in the chan_info list because it + may be helpful to set a consistent name for specific channels, such as + "in_transition", instead of using the autogenerated channel name, which + can change between tunes. Any additional field names used will remain + in the influx database, so be wary of programatically adding many of + them. + + Args + ------ + chan_info : list + List of channel info corresponding to channels to monitor. + Entries of this list can be: + + - ints: This will be interpreted as the readout-channel to + monitor. The field-name will be set to "r". + - tuples of length 2: Here the first element will be the readout + chan to monitor, and the second value will be the field name to + use for that channel. + + This list can be no more than 6 elements to limit how much detector + data is saved to the HK format. + + sample_rate : float + Target sample rate (Hz) to downsample detector data to. This must + be less than 20 Hz to limit how much detector data is saved to hk. + + """ + monitored_chans = [] + for ch_info in params['chan_info']: + if isinstance(ch_info, int): + field_name = f"r{ch_info:0>4}" + monitored_chans.append((ch_info,field_name)) + elif isinstance(ch_info, (tuple, list)): + monitored_chans.append(ch_info) + else: + raise ValueError( + f"ch_info (type {type(ch_info)}) must be of type int or " + "tuple" + ) + + self.monitored_channels = monitored_chans + self.monitored_chan_sample_rate = params['sample_rate'] + return True, "Set monitored channels" + + def _process_status(self, frame): + """ + Processes a status frame. This will set or update the channel + mask whenever the smurf metadata is updated. 
+ """ + if 'session_id' not in frame: + return + if self.mask_register in frame['status']: + status = yaml.safe_load(frame['status']) + self.mask = np.array( + ast.literal_eval(status[self.mask_register]) + ) + + def _process_monitored_chans(self, times, data): + """ + Downsamples data for monitored channels and publishes tods to a grafana + feed. + """ + if not self.monitored_channels: + return + + target_rate = self.monitored_chan_sample_rate + if len(times) <= 1: + ds_factor = 1 + else: + input_rate = 1./np.median(np.diff(times)) + ds_factor = max(int(input_rate // target_rate), 1) + + sl = slice(None, None, ds_factor) + + times_out = times[sl] + for rc, field_name in self.monitored_channels: + if rc >= len(data): + self.log.warn( + f"Readout channel {rc} is larger than the number of" + f"streamed channels ({len(data)})! Data won't be published" + "to grafana." + ) + continue + _data = { + 'timestamps': times_out.tolist(), + 'block_name': field_name, + 'data': { + field_name: data[rc, sl].tolist() + } + } + self.agent.publish_to_feed('detector_tods', _data) + + def _process_data(self, frame, source_offset=0): + """ + Processes a Scan frame. If lyrebird is enabled, this will return a seq + of G3Frames that are formatted for lyrebird to ingest. + """ + if 'session_id' not in frame: + return [] + + # Calculate downsample factor + times_in, data_in = load_frame_data(frame) + times_in = times_in - source_offset + sample_rate = 1./np.median(np.diff(times_in)) + nsamps = len(times_in) + nchans = len(data_in) + + self._process_monitored_chans(times_in, data_in) + + if self.avg1 is None: + self.avg1 = RollingAvg(200, nchans) + self.avg2 = RollingAvg(200, nchans) + elif self.avg1.nchans != nchans: + self.log.warn(f"Channel count has changed! 
{self.avg1.nchans}->{nchans}") + self.avg1 = RollingAvg(200, nchans) + self.avg2 = RollingAvg(200, nchans) + + # Calc RMS + hpf_data = data_in - self.avg1.apply(data_in) # To high-pass filter + rms = np.sqrt(self.avg2.apply(hpf_data**2)) # To calc rolling rms + + ds_factor = sample_rate // self.target_rate + if np.isnan(ds_factor): # There is only one element in the timestream + ds_factor = 1 + ds_factor = max(int(ds_factor), 1) # Prevents downsample factors < 1 + + # Arrange output data structure + sample_idxs = np.arange(0, nsamps, ds_factor, dtype=np.int32) + num_frames = len(sample_idxs) + + times_out = times_in[sample_idxs] + + abs_chans = self.mask[np.arange(nchans)] + + raw_out = np.zeros((num_frames, self.fp.num_dets)) + rms_out = np.zeros((num_frames, self.fp.num_dets)) + + for i, c in enumerate(abs_chans): + if c >= len(self.fp.chan_mask): + continue + idx = self.fp.chan_mask[c] + if idx >= 0: + raw_out[:, idx] = data_in[i, sample_idxs] + rms_out[:, idx] = rms[i, sample_idxs] + + out = [] + for i in range(num_frames): + fr = core.G3Frame(core.G3FrameType.Scan) + fr['idx'] = 0 + fr['data'] = core.G3VectorDouble(raw_out[i]) + fr['timestamp'] = core.G3Time(times_out[i] * core.G3Units.s) + out.append(fr) + + fr = core.G3Frame(core.G3FrameType.Scan) + fr['idx'] = 1 + fr['data'] = core.G3VectorDouble(rms_out[i]) + fr['timestamp'] = core.G3Time(times_out[i] * core.G3Units.s) + out.append(fr) + return out + + + def read(self, session, params=None): + """read(src='tcp://localhost:4532') + + **Process** - Process for reading in G3Frames from a source or list of + sources. If this source is an address that begins with ``tcp://``, the + agent will attempt to connect to a G3NetworkSender at the specified + location. The ``src`` param can also be a filepath or list of filepaths + pointing to G3Files to be streamed. If a list of filenames is passed, + once the first file is finished streaming, subsequent files will be + streamed. 
+ """ + + self._running = True + session.set_status('running') + + src_idx = 0 + if isinstance(params['src'], str): + sources = [params['src']] + else: + sources = params['src'] + + reader = None + source = None + source_offset = 0 + while self._running: + + if reader is None: + try: + source = sources[src_idx] + source_is_file = not source.startswith('tcp://') + reader = core.G3Reader(source, timeout=5) + except RuntimeError as e: + if source_is_file: + # Raise error if file cannot be found + raise e + else: + # If not a file, log error and try again + self.log.error("G3Reader could not connect! Retrying in 10 sec.") + time.sleep(10) + continue + + frames = reader.Process(None) + if not frames: + # If source is a file, start over with next file or break if + # finished all sources. If socket, just reset reader and try to + # reconnect + if source_is_file: + src_idx += 1 + if src_idx >= len(sources): + self.log.info("Finished reading all sources") + break + reader = None + continue + + frame = frames[0] + + # If this source is a file, this will shift the timestamps so that + # data lines up with the current timestamp instead of using the + # timestamps in the file + if source_is_file and (not source_offset): + source_offset = frame['time'].time / core.G3Units.s \ + - time.time() + elif not source_is_file: + source_offset = 0 + + if frame.type == core.G3FrameType.Wiring: + self._process_status(frame) + continue + elif frame.type == core.G3FrameType.Scan: + out = self._process_data(frame, source_offset=source_offset) + else: + continue + + for f in out: + # This will block until there's a free spot in the queue. 
+ # This is useful if the src is a file and reader.Process does + # not block + self.out_queue.put(f) + return True, "Stopped read process" + + def _stop_read(self, session, params=None): + self._running = False + return True, "Stopping read process" + + + def stream_fake_data(self, session, params=None): + """stream_fake_data() + + **Process** - Process for streaming fake data. This will queue up + G3Frames full of fake data to be sent to lyrebird. + """ + self._run_fake_stream = True + ndets = self.fp.num_dets + chans = np.arange(ndets) + frame_start = time.time() + while self._run_fake_stream: + time.sleep(2) + frame_stop = time.time() + ts = np.arange(frame_start, frame_stop, 1./self.target_rate) + frame_start = frame_stop + nframes = len(ts) + + data_out = np.random.normal(0, 1, (nframes, ndets)) + data_out += np.sin(2*np.pi*ts[:, None] + .2 * chans[None, :]) + + for t, d in zip(ts, data_out): + fr = core.G3Frame(core.G3FrameType.Scan) + fr['idx'] = 0 + fr['data'] = core.G3VectorDouble(d) + fr['timestamp'] = core.G3Time(t * core.G3Units.s) + self.out_queue.put(fr) + + fr = core.G3Frame(core.G3FrameType.Scan) + fr['idx'] = 1 + fr['data'] = core.G3VectorDouble(np.sin(d)) + fr['timestamp'] = core.G3Time(t * core.G3Units.s) + self.out_queue.put(fr) + + return True, "Stopped fake stream process" + + + def _stop_stream_fake_data(self, session, params=None): + self._run_fake_stream = False + return True, "Stopping fake stream process" + + + @ocs_agent.param('dest', type=int) + def send(self, session, params=None): + """send(dest) + + **Process** - Process for sending outgoing G3Frames. This will query + the out_queue for frames to be sent to lyrebird. This will try to + regulate how fast it sends frames such that the delay between when the + frames are sent, and the timestamp of the frames are fixed. 
+ + """ + self._send_running = True + + first_frame_time = None + stream_start_time = None + + sender = core.G3NetworkSender( + hostname='*', port=params['dest'], max_queue_size=1000 + ) + + sender.Process(self.fp.config_frame()) + session.set_status('running') + while session.status in ['starting', 'running']: + f = self.out_queue.get(block=True) + t = f['timestamp'].time / core.G3Units.s + now = time.time() + if first_frame_time is None: + first_frame_time = t + stream_start_time = now + + this_frame_time = stream_start_time + (t - first_frame_time) + self.delay + res = sleep_while_running(this_frame_time - now, session) + sender.Process(f) + + return True, "Stopped send process" + + def _send_stop(self, session, params=None): + self._send_running = False + return True, "Stopping send process" + + +def make_parser(parser=None): + if parser is None: + parser = argparse.ArgumentParser() + + pgroup = parser.add_argument_group('Agent Options') + pgroup.add_argument('--src', nargs='+', default='tcp://localhost:4532', + help="Address of incoming G3Frames.") + pgroup.add_argument('--dest', type=int, default=8675, + help="Port to serve lyrebird frames") + pgroup.add_argument('--stream-id', type=str, default='none', + help="Stream-id to use to distinguish magpie streams." + "This will be prepended to data-val names in lyrebird.") + pgroup.add_argument('--target-rate', '-t', type=float, default=20, + help="Target sample rate for data being sent to Lyrebird. " + "Detector data will be downsampled to this rate.") + pgroup.add_argument( + '--delay', type=float, default=5, + help="Delay (sec) between the timestamp of a G3Frame relative to the " + "initial frame, and when the frame should be sent to lyrebird. " + "This must be larger than the frame-aggregation time for smooth " + "update times in lyrebird." 
+ ) + pgroup.add_argument('--layout', '-l', default='grid', choices=['grid', 'wafer'], + help="Focal plane layout style") + pgroup.add_argument('--xdim', type=int, default=64, + help="Number of pixels in x-dimension for grid layout") + pgroup.add_argument('--ydim', type=int, default=64, + help="Number of pixels in y-dimension for grid layout") + pgroup.add_argument('--wafer-scale', '--ws', type=float, default=50., + help="scale of wafer coordinates") + pgroup.add_argument('--det-map', type=str, help="Path to det-map csv file") + pgroup.add_argument('--fake-data', action='store_true', + help="If set, will stream fake data instead of listening to " + "a G3stream.") + pgroup.add_argument('--offset', nargs=2, default=[0, 0], type=float, + help="Offset of detector coordinates with respect to " + "lyrebird coordinate system") + pgroup.add_argument('--monitored-channels', nargs='+', type=int, default=[], + help="Readout channels to start monitoring on startup") + pgroup.add_argument('--monitored-channel-rate', type=float, default=10, + help="Target sample rate for monitored channels") + return parser + + +if __name__ == '__main__': + txaio.use_twisted() + txaio.start_logging(level=os.environ.get("LOGLEVEL", "info")) + parser = make_parser() + args = site_config.parse_args(agent_class='MagpieAgent', parser=parser) + + agent, runner = ocs_agent.init_site_agent(args) + magpie = MagpieAgent(agent, args) + + if args.fake_data: + read_startup = False + else: + read_startup = {'src': args.src} + + agent.register_process('read', magpie.read, magpie._stop_read, + startup=read_startup) + agent.register_process('stream_fake_data', magpie.stream_fake_data, + magpie._stop_stream_fake_data, startup=args.fake_data) + agent.register_process('send', magpie.send, magpie._send_stop, + startup={'dest': args.dest}) + agent.register_task('set_target_rate', magpie.set_target_rate) + agent.register_task('set_delay', magpie.set_delay) + agent.register_task( + 'set_monitored_channels', 
magpie.set_monitored_channels, + startup={'chan_info': args.monitored_channels, + 'sample_rate': args.monitored_channel_rate} + ) + + runner.run(agent, auto_reconnect=True) diff --git a/agents/meinberg_m1000/Dockerfile b/agents/meinberg_m1000/Dockerfile index 96760491b..d3d744f79 100644 --- a/agents/meinberg_m1000/Dockerfile +++ b/agents/meinberg_m1000/Dockerfile @@ -14,7 +14,7 @@ COPY meinberg_m1000_agent.py . RUN true # Copy in the MIBS -COPY mibs/ /usr/local/lib/python3.6/dist-packages/pysnmp/smi/mibs/ +COPY mibs/ /root/.pysnmp/mibs/ # Run agent on container startup ENTRYPOINT ["dumb-init", "python3", "-u", "meinberg_m1000_agent.py"] diff --git a/agents/meinberg_m1000/meinberg_m1000_agent.py b/agents/meinberg_m1000/meinberg_m1000_agent.py index ea5e35217..c5a9ca6e5 100644 --- a/agents/meinberg_m1000/meinberg_m1000_agent.py +++ b/agents/meinberg_m1000/meinberg_m1000_agent.py @@ -7,6 +7,7 @@ from autobahn.twisted.util import sleep as dsleep from twisted.internet.defer import inlineCallbacks +from twisted.internet import reactor from socs.snmp import SNMPTwister @@ -28,6 +29,8 @@ class MeinbergSNMP: Address of the M1000. port : int SNMP port to issue GETs to, default to 161. + version : int + SNMP version for communication (1, 2, or 3), defaults to 3. Attributes ---------- @@ -44,14 +47,15 @@ class MeinbergSNMP: float, respectively. "lastGet" is initialized as None, since no SNMP GET commands have been issued. oid_cache : dict - Cache of OID values and corresponding decoded values. Meant to pass to - session.data. + Cache of OID values and corresponding decoded values. Meant to be + passed directly to session.data. 
""" - def __init__(self, address, port=161): + def __init__(self, address, port=161, version=3): self.log = txaio.make_logger() self.address = address self.port = port + self.version = version self.snmp = SNMPTwister(address, port) # OIDs and how often to query them @@ -172,7 +176,7 @@ def _extract_oid_field_and_value(self, get_result): return field_name, oid_value, oid_description - def update_cache(self, get_result, time): + def update_cache(self, get_result, timestamp): """Update the OID Value Cache. The OID Value Cache is used to store each unique OID, the latest value, @@ -189,42 +193,43 @@ def update_cache(self, get_result, time): "description": "disabled", "lastGet": 1598543397.689727}} + Additionally there is connection status information under:: + + {"m1000_connection": + {"last_attempt": 1598543359.6326838, + "connected": True} + This method modifies self.oid_cache. Parameters ---------- get_result : pysnmp.smi.rfc1902.ObjectType Result from a pysnmp GET command. - time : float + timestamp : float Timestamp for when the SNMP GET was issued. """ - for item in get_result: - field_name, oid_value, oid_description = self._extract_oid_field_and_value(item) - if oid_value is None: - continue - - # Update OID Cache for session.data - self.oid_cache[field_name] = {"status": oid_value} - self.oid_cache[field_name]["lastGet"] = time - self.oid_cache[field_name]["description"] = oid_description - - def get_cache(self): - """Return the current cache. Should be used to pass cached values to - session.data. See MeinbergSNMP.update_cache() for a description of the cache - data structure. - - Returns - ------- - dict - The OID Value Cache, containing each OID, their latest status - value, description, and last updated time. 
- - """ - return self.oid_cache + try: + for item in get_result: + field_name, oid_value, oid_description = self._extract_oid_field_and_value(item) + if oid_value is None: + continue + + # Update OID Cache for session.data + self.oid_cache[field_name] = {"status": oid_value} + self.oid_cache[field_name]["lastGet"] = timestamp + self.oid_cache[field_name]["description"] = oid_description + self.oid_cache['m1000_connection'] = {'last_attempt': time.time(), + 'connected': True} + # This is a TypeError due to nothing coming back from the yield in + # run_snmp_get, so get_result is None here and can't be iterated. + except TypeError: + self.oid_cache['m1000_connection'] = {'last_attempt': time.time(), + 'connected': False} + raise ConnectionError('No SNMP response. Check your connection.') def _build_message(self, interval, get_result, time): - """Built the message for publication on an OCS Feed. + """Build the message for publication on an OCS Feed. For a given MIB timing interval, build a message for Feed publication. 
Each interval contains only the OIDs that are sampled on the same timing @@ -286,20 +291,26 @@ def run_snmp_get(self, session): continue # Issue SNMP GET command - result = yield self.snmp.get(get_list) + result = yield self.snmp.get(get_list, self.version) read_time = time.time() - self.update_cache(result, read_time) - message = self._build_message(interval, result, read_time) + # Do not publish if M1000 connection has dropped + try: + self.update_cache(result, read_time) + message = self._build_message(interval, result, read_time) + + # Update lastGet time + for mib in self.mib_timings: + if mib['oid'] in get_list: + mib['lastGet'] = read_time - # Update lastGet time - for mib in self.mib_timings: - if mib['oid'] in get_list: - mib['lastGet'] = read_time + self.log.debug("{msg}", msg=message) + session.app.publish_to_feed('m1000', message) + except ConnectionError as e: + self.log.error(f'{e}') - self.log.debug("{msg}", msg=message) - session.app.publish_to_feed('m1000', message) - session.data = self.get_cache() + # Update connection status in session.data + session.data = self.oid_cache class MeinbergM1000Agent: @@ -313,6 +324,8 @@ class MeinbergM1000Agent: Address of the M1000. port : int SNMP port to issue GETs to, default to 161. + version : int + SNMP version for communication (1, 2, or 3), defaults to 3. 
Attributes ---------- @@ -325,12 +338,13 @@ class MeinbergM1000Agent: txaio logger object, created by the OCSAgent """ - def __init__(self, agent, address, port=161): + def __init__(self, agent, address, port=161, version=3): self.agent = agent self.is_streaming = False self.log = self.agent.log - self.meinberg = MeinbergSNMP(address, port) + self.log.info(f'Using SNMP version {version}.') + self.meinberg = MeinbergSNMP(address, port, version) agg_params = { 'frame_length': 10*60 # [sec] @@ -378,7 +392,10 @@ def acq(self, session, params=None): "mbgLtNgEthPortLinkState_1": {"status": 1, "description": "up", - "lastGet": 1598543359.6326838}} + "lastGet": 1598543359.6326838} + "m1000_connection": + {"last_attempt": 1598543359.6326838, + "connected": True}} Note that session.data is populated within the :func:`MeinbergSNMP.run_snmp_get` call. @@ -386,12 +403,24 @@ def acq(self, session, params=None): if params is None: params = {} + # Make an initial attempt at connection. + # Allows us to fail early if misconfigured. + yield self.meinberg.run_snmp_get(session) + if not self.meinberg.oid_cache['m1000_connection'].get('connected', False): + self.log.error('No initial SNMP response.') + self.log.error('Either there is a network connection issue, ' + + 'or maybe you are using the wrong SNMP ' + + 'version. Either way, we are exiting.') + + reactor.callFromThread(reactor.stop) + return False, 'acq process failed - No connection to M1000' + self.is_streaming = True while self.is_streaming: yield self.meinberg.run_snmp_get(session) self.log.debug("{data}", data=session.data) - yield dsleep(0.1) + yield dsleep(1) return True, "Finished Recording" @@ -421,6 +450,9 @@ def make_parser(parser=None): pgroup.add_argument("--address", help="Address to listen to.") pgroup.add_argument("--port", default=161, help="Port to listen on.") + pgroup.add_argument("--snmp-version", default='3', choices=['1', '2', '3'], + help="SNMP version for communication. 
Must match " + + "configuration on the M1000.") return parser @@ -435,7 +467,8 @@ def make_parser(parser=None): agent, runner = ocs_agent.init_site_agent(args) listener = MeinbergM1000Agent(agent, address=args.address, - port=int(args.port)) + port=int(args.port), + version=int(args.snmp_version)) agent.register_process("acq", listener.acq, diff --git a/agents/ocs_plugin_so.py b/agents/ocs_plugin_so.py index bbed4fc20..6d0dcc99e 100644 --- a/agents/ocs_plugin_so.py +++ b/agents/ocs_plugin_so.py @@ -13,9 +13,8 @@ ('Lakeshore370Agent', 'lakeshore370/LS370_agent.py'), ('Lakeshore240Agent', 'lakeshore240/LS240_agent.py'), ('Keithley2230G-PSU', 'keithley2230G-psu/keithley_agent.py'), - ('PysmurfController', 'smurf/pysmurf_control.py'), + ('PysmurfController', 'pysmurf_controller/pysmurf_controller.py'), ('BlueforsAgent', 'bluefors/bluefors_log_tracker.py'), - ('HWPSimulatorAgent', 'hwp_sim/hwp_simulator_agent.py'), ('CryomechCPAAgent', 'cryomech_cpa/cryomech_cpa_agent.py'), ('LATRtXYStageAgent', 'xy_stage/xy_latrt_agent.py'), ('ACUAgent', 'acu/acu_agent.py'), diff --git a/agents/pfeiffer_tc400/Dockerfile b/agents/pfeiffer_tc400/Dockerfile new file mode 100644 index 000000000..c8970fccc --- /dev/null +++ b/agents/pfeiffer_tc400/Dockerfile @@ -0,0 +1,22 @@ +# SOCS Pfeiffer tc400 Agent +# socs Agent container for interacting with Pfeiffer TC 400 electronic drive unit +# using a moxa serial to ethernet controller converter. + +# Use socs base image +FROM socs:latest + +# Set the working directory to registry directory +WORKDIR /app/socs/agents/pfeiffer_tc400/ + +# Copy and install requirements +COPY requirements.txt . +RUN pip3 install -r requirements.txt + +## Copy this agent into the app/agents directory +COPY . . 
+ +# Run agent on container startup +ENTRYPOINT ["dumb-init", "python3", "-u", "pfeiffer_tc400_agent.py"] + +CMD ["--site-hub=ws://crossbar:8001/ws", \ + "--site-http=http://crossbar:8001/call"] diff --git a/agents/pfeiffer_tc400/pfeiffer_tc400_agent.py b/agents/pfeiffer_tc400/pfeiffer_tc400_agent.py new file mode 100644 index 000000000..e30f72617 --- /dev/null +++ b/agents/pfeiffer_tc400/pfeiffer_tc400_agent.py @@ -0,0 +1,264 @@ +import time +import os +import socket +import argparse +import txaio +from os import environ + +from ocs import ocs_agent, site_config +from ocs.ocs_twisted import TimeoutLock + +on_rtd = os.environ.get('READTHEDOCS') == 'True' +if not on_rtd: + from pfeiffer_tc400_driver import PfeifferTC400 + + +class PfeifferTC400Agent: + """Agent to connect to a pfeiffer tc400 electronic drive unit controlling a + turbo pump via a serial-to-ethernet converter. + + Parameters + ---------- + ip_address : str + IP address for the serial-to-ethernet converter + port_number : int + Serial-to-ethernet converter port + turbo_address : int + An internal address used to communicate between the power supplies and + the tc400. Found on the front screen of the power supplies. + """ + def __init__(self, agent, ip_address, port_number, turbo_address): + self.agent = agent + self.log = agent.log + self.lock = TimeoutLock() + + self.job = None + self.ip_address = ip_address + self.port_number = port_number + self.turbo_address = turbo_address + self.monitor = False + + self.turbo = None + + # Registers Temperature and Voltage feeds + agg_params = { + 'frame_length': 10*60, + } + self.agent.register_feed('pfeiffer_turbo', + record=True, + agg_params=agg_params, + buffer_time=0) + + @ocs_agent.param('auto_acquire', default=False, type=bool) + def init(self, session, params): + """init(auto_acquire=False) + + **Task** - Initialize the connection to the turbo controller. + + Parameters + ---------- + auto_acquire: bool, optional + Default is False. 
Starts data acquisition after initialization + if True. + """ + + with self.lock.acquire_timeout(0) as acquired: + if not acquired: + return False, "Could not acquire lock" + + try: + self.turbo = PfeifferTC400(self.ip_address, + self.port_number, + self.turbo_address) + except socket.timeout as e: + self.log.error("Turbo Controller timed out" + + f"during connect with error {e}") + return False, "Timeout" + self.log.info("Connected to turbo controller") + + # Start data acquisition if requested in site-config + auto_acquire = params.get('auto_acquire', False) + if auto_acquire: + self.agent.start('acq') + + return True, 'Initialized Turbo Controller.' + + @ocs_agent.param('test_mode', default=False, type=bool) + @ocs_agent.param('wait', default=1, type=float) + def acq(self, session, params): + """acq(wait=1, test_mode=False) + + **Process** - Continuously monitor turbo motor temp and rotation speed + and send info to aggregator. + + The ``session.data`` object stores the most recent published values + in a dictionary. For example:: + + session.data = { + 'timestamp': 1598626144.5365012, + 'block_name': 'turbo_output', + 'data': { + "Turbo_Motor_Temp": 40.054, + "Rotation_Speed": 823.655, + "Error_Code": "Err001", + } + } + + Parameters + ---------- + wait: float, optional + time to wait between measurements [seconds]. Default=1s. 
+ + """ + self.monitor = True + + while self.monitor: + with self.lock.acquire_timeout(1) as acquired: + if acquired: + data = { + 'timestamp': time.time(), + 'block_name': 'turbo_output', + 'data': {} + } + + try: + data['data']["Turbo_Motor_Temp"] = self.turbo.get_turbo_motor_temperature() + data['data']["Rotation_Speed"] = self.turbo.get_turbo_actual_rotation_speed() + data['data']['Error_Code'] = self.turbo.get_turbo_error_code() + except ValueError as e: + self.log.error(f"Error in collecting data: {e}") + continue + + self.agent.publish_to_feed('pfeiffer_turbo', data) + + # Allow this process to be queried to return current data + session.data = data + + else: + self.log.warn("Could not acquire in monitor turbo") + + time.sleep(params['wait']) + + if params['test_mode']: + break + + return True, "Finished monitoring turbo" + + def _stop_acq(self, session, params): + """Stop monitoring the turbo output.""" + if self.monitor: + self.monitor = False + return True, 'requested to stop taking data.' + else: + return False, 'acq is not currently running' + + @ocs_agent.param('_') + def turn_turbo_on(self, session, params): + """turn_turbo_on() + + **Task** - Turns the turbo on. + + """ + + with self.lock.acquire_timeout(1) as acquired: + if acquired: + ready = self.turbo.ready_turbo() + if not ready: + return False, "Setting to ready state failed" + time.sleep(1) + on = self.turbo.turn_turbo_motor_on() + if not on: + return False, "Turbo unable to be turned on" + else: + return False, "Could not acquire lock" + + return True, 'Turned turbo on' + + @ocs_agent.param('_') + def turn_turbo_off(self, session, params): + """turn_turbo_off() + + **Task** - Turns the turbo off. 
+ + """ + + with self.lock.acquire_timeout(1) as acquired: + if acquired: + off = self.turbo.turn_turbo_motor_off() + if not off: + return False, "Turbo unable to be turned off" + time.sleep(1) + unready = self.turbo.unready_turbo() + if not unready: + return False, "Setting to ready state failed" + else: + return False, "Could not acquire lock" + + return True, 'Turned turbo off' + + @ocs_agent.param('_') + def acknowledge_turbo_errors(self, session, params): + """acknowledge_turbo_errors() + + **Task** - Sends an acknowledgment of the error code to the turbo. + + """ + with self.lock.acquire_timeout(1) as acquired: + if acquired: + self.turbo.acknowledge_turbo_errors() + else: + return False, "Could not acquire lock" + + return True, 'Acknowledged Turbo Errors.' + + +def make_parser(parser=None): + """Build the argument parser for the Agent. Allows sphinx to automatically + build documentation based on this function. + """ + if parser is None: + parser = argparse.ArgumentParser() + + # Add options specific to this agent. 
+ pgroup = parser.add_argument_group('Agent Options') + pgroup.add_argument('--ip-address', type=str, help="Serial-to-ethernet " + + "converter ip address") + pgroup.add_argument('--port-number', type=int, help="Serial-to-ethernet " + + "converter port") + pgroup.add_argument('--turbo-address', type=int, help="Internal address " + + "used by power supplies") + pgroup.add_argument('--mode', type=str, help="Set to acq to run acq on " + + "startup") + + return parser + + +if __name__ == '__main__': + # Start logging + txaio.start_logging(level=environ.get("LOGLEVEL", "info")) + + parser = site_config.add_arguments() + + # Get the default ocs agrument parser + parser = make_parser() + args = site_config.parse_args(agent_class='PfeifferTC400Agent', + parser=parser) + + init_params = False + if args.mode == 'acq': + init_params = {'auto_acquire': True} + + agent, runner = ocs_agent.init_site_agent(args) + + p = PfeifferTC400Agent(agent, + args.ip_address, + int(args.port_number), + int(args.turbo_address)) + + agent.register_task('init', p.init, startup=init_params) + agent.register_task('turn_turbo_on', p.turn_turbo_on) + agent.register_task('turn_turbo_off', p.turn_turbo_off) + agent.register_task('acknowledge_turbo_errors', p.acknowledge_turbo_errors) + agent.register_process('acq', p.acq, p._stop_acq) + + runner.run(agent, auto_reconnect=True) diff --git a/agents/pfeiffer_tc400/pfeiffer_tc400_driver.py b/agents/pfeiffer_tc400/pfeiffer_tc400_driver.py new file mode 100644 index 000000000..4ebd44adb --- /dev/null +++ b/agents/pfeiffer_tc400/pfeiffer_tc400_driver.py @@ -0,0 +1,198 @@ +# Author: Michael Randall +# Email: mrandall@ucsd.edu + +# This Driver works by creating a TCP connection to a Moxa Ethernet to Serial Converter. +# It uses this Converter to send and receive serial messages with the Pfeiffer Vacuum controller. 
# The Driver employs the serial package to create the TCP connection
+ """ + + send_data_request(self.ser, self.turbo_address, 346) + addr, rw, param_num, motor_temp = read_gauge_response(self.ser) + + return int(motor_temp) + + def get_turbo_actual_rotation_speed(self): + """Gets the current rotation speed of the turbo from the turbo controller. + + Returns + ------- + int + The current rotation speed of the turbo in Hz. + """ + + send_data_request(self.ser, self.turbo_address, 309) + + addr, rw, param_num, actual_rotation_speed = read_gauge_response(self.ser) + + return int(actual_rotation_speed) + + def get_turbo_set_rotation_speed(self): + """Gets the the rotation speed that the turbo is set to from the turbo controller. + This is the speed in Hz that the turbo motor will spin up to if turned on. + + Returns + ------- + int + The rotation speed that the turbo is set to in Hz + """ + + send_data_request(self.ser, self.turbo_address, 308) + + addr, rw, param_num, set_rotation_speed = read_gauge_response(self.ser) + + return int(set_rotation_speed) + + def get_turbo_error_code(self): + """Gets the current error code of the turbo from the turbo controller. + + Returns + ------- + str + The current error code of the turbo. + """ + send_data_request(self.ser, self.turbo_address, 303) + + addr, rw, param_num, error_code = read_gauge_response(self.ser) + + return error_code + + def unready_turbo(self): + """Unreadies the turbo. Does not cause the turbo to spin up. + Returns + ------- + bool + True for successful, False for failure. + """ + + send_control_command(self.ser, self.turbo_address, 10, "000000") + + addr, rw, param_num, turbo_response = read_gauge_response(self.ser) + + if turbo_response not in PFEIFFER_BOOL: + raise ValueError(f"Unrecognized response from turbo: {turbo_response}") + else: + return turbo_response == "111111" + + def ready_turbo(self): + """Readies the turbo for spinning. Does not cause the turbo to spin up. + + Returns + ------- + bool + True for successful, False for failure. 
+ """ + + send_control_command(self.ser, self.turbo_address, 10, "111111") + + addr, rw, param_num, turbo_response = read_gauge_response(self.ser) + + if turbo_response not in PFEIFFER_BOOL: + raise ValueError(f"Unrecognized response from turbo: {turbo_response}") + else: + return turbo_response == "111111" + + def turn_turbo_motor_on(self): + """Turns the turbo motor on. The turbo must be readied before the motor will turn on. + This is what causes the turbo to actually spin up. + + Returns + ------- + bool + True for successful, False for failure. + """ + + send_control_command(self.ser, self.turbo_address, 23, "111111") + + addr, rw, param_num, turbo_response = read_gauge_response(self.ser) + + if turbo_response not in PFEIFFER_BOOL: + raise ValueError(f"Unrecognized response from turbo: {turbo_response}") + else: + return turbo_response == "111111" + + def turn_turbo_motor_off(self): + """Turns the turbo motor off. + + Returns + ------- + bool + True for successful, False for failure. + """ + + send_control_command(self.ser, self.turbo_address, 23, "000000") + + addr, rw, param_num, turbo_response = read_gauge_response(self.ser) + + if turbo_response not in PFEIFFER_BOOL: + raise ValueError(f"Unrecognized response from turbo: {turbo_response}") + else: + return turbo_response == "111111" + + def acknowledge_turbo_errors(self): + """Acknowledges the turbo errors. This is analagous to clearing the errors. + If the errors were fixed, the turbo will be able to be turned back on. + + Returns + ------- + bool + True for successful, False for failure. 
+ """ + + send_control_command(self.ser, self.turbo_address, 9, "111111") + + addr, rw, param_num, turbo_response = read_gauge_response(self.ser) + + if turbo_response not in PFEIFFER_BOOL: + raise ValueError(f"Unrecognized response from turbo: {turbo_response}") + else: + return turbo_response == "111111" diff --git a/agents/pfeiffer_tc400/requirements.txt b/agents/pfeiffer_tc400/requirements.txt new file mode 100644 index 000000000..3b8e8fc39 --- /dev/null +++ b/agents/pfeiffer_tc400/requirements.txt @@ -0,0 +1 @@ +pfeiffer-vacuum-protocol==0.4 diff --git a/agents/pysmurf_archiver/Dockerfile b/agents/pysmurf_archiver/Dockerfile deleted file mode 100644 index f3aee2fcd..000000000 --- a/agents/pysmurf_archiver/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# SOCS Pysmurf Archiver -# socs Agent for copying pysmurf data files and plots to archive - -# Use socs base image -FROM socs:latest - -# Set the working directory to registry directory -WORKDIR /app/socs/agents/pysmurf_archiver/ -# Copy this agent into the app/agents directory -COPY . . 
- -RUN apt-get install rsync -y - -RUN pip3 install -r requirements.txt - -# Run registry on container startup -ENTRYPOINT ["dumb-init", "python3", "-u", "pysmurf_archiver_agent.py"] - -CMD ["--site-hub=ws://crossbar:8001/ws", \ - "--site-http=http://crossbar:8001/call"] diff --git a/agents/pysmurf_archiver/pysmurf_archiver_agent.py b/agents/pysmurf_archiver/pysmurf_archiver_agent.py deleted file mode 100644 index e5ca9ca04..000000000 --- a/agents/pysmurf_archiver/pysmurf_archiver_agent.py +++ /dev/null @@ -1,311 +0,0 @@ -import argparse -import os -import time -import queue -import subprocess -from socs.util import get_db_connection, get_md5sum -import binascii -import datetime -from socs.db.pysmurf_files_manager import table as pysmurf_table_name - -from twisted.enterprise import adbapi - -on_rtd = os.environ.get('READTHEDOCS') == 'True' -if not on_rtd: - from ocs import ocs_agent, site_config - - -def create_local_path(file, data_dir): - """ - Creates local path from file database entry. - If subdirectories do not exist, they will be created. - - The file path will be: - - data_dir/<5 ctime digits>//_/ - - E.g. - - /data/pysmurf/15647/crate1slot2/1564799250_tune_band/outputs/1564799250_tuning_b1.txt - - In the case of duplicate datafiles being registered, the duplicates - will still be copied over to the location `new_path_name.{i}` where - `i` is the next unique index. - - - Args: - file (dict): - Database entry for file. - data_dir (string): - Path to base directory where files should be copied. 
- - Returns: - path (string): - Local pathname for file - """ - - filename = os.path.basename(file['path']) - - dt = file['timestamp'] - - action = file['action'] - - # First tries to get action timestamp entry - action_ts = file['action_timestamp'] - if action_ts is None: - try: - # If that doesn't exist, try to get the group timestamp from the filename - action_ts = int(os.path.splitext(filename)[0].split('_')[0]) - except ValueError as e: - # If that doesn't work, just use the file creation timestamp - action_ts = str(int(dt.timestamp())) - - dir_type = 'plots' if file['plot'] else 'outputs' - - subdir = os.path.join( - data_dir, # Base directory - f"{str(dt.timestamp()):.5}", # 5 ctime digits - file['pub_id'], # publisher id - f"{action_ts}_{action}", # grouptime_action - dir_type, # plots/outputs - ) - new_path = os.path.join(subdir, filename) - unique_path = new_path - i = 1 - while os.path.exists(unique_path): - unique_path = new_path + f'.{i}' - i += 1 - if unique_path != new_path: - print(f"Warning! Trying to archive duplicate of {new_path}! " - f"Will be archived as {unique_path} instead.") - - if not os.path.exists(subdir): - os.makedirs(subdir) - - return unique_path - - -class PysmurfArchiverAgent: - """ - Agent to archive pysmurf config files and plots. It should be run on the - computer where you want files to be archived, and must have access to the - database with the `pysmurf_files` table. - - Files must each have a unique filename (not just a unique path), and a - file type: (tuning, channel_map, final_registers, etc.). - The file will be copied over to the location: - - data_dir/<5 ctime digits>// - - Where data_dir is specified in the site-config or command line. 
For instance, - if ``data_dir = /data/pysmurf``, tuning dat for band 1 might be written to - - /data/pysmurf/15647/tuning/1564799250_tuning_b1.txt - - Args: - agent (ocs.ocs_agent.OCSAgent): - OCSAgent object which is running - data_dir (string): - Path to base directory where files will be copied. - targets (List[string]): - list of instance-id's of pysmurf-monitors who publish the files - that should be copied over - host (string): - Host name of the smurf-server that host original files - user (string): - username on host to use for copying - - Attributes: - agent (ocs.ocs_agent.OCSAgent): - OCSAgent object which is running - log (txaio.tx.Logger): - txaio logger object created by agent - data_dir (string): - Path to base directory where files will be copied. - targets (List[string]): - list of instance-id's of pysmurf-monitors who publish the files - that should be copied over - host (string): - Host name of the smurf-server that host original files - user (string): - username on host to use for copying - running (bool): - If run task is currently running - sql_config (dict): - sql login info - """ - def __init__(self, agent, data_dir=None, targets=[], host=None, user=None): - self.agent: ocs_agent.OCSAgent = agent - self.log = self.agent.log - - self.host = host - self.user = user - self.data_dir = data_dir - self.targets = targets - - self.running = False - - self.sql_config = { - 'user': os.environ["MYSQL_USER"], - 'passwd': os.environ["MYSQL_PASSWORD"], - 'database': 'files' - } - db_host = os.environ.get('MYSQL_HOST') - if db_host is not None: - self.sql_config['host'] = db_host - - def _copy_file(self, old_path, new_path, md5sum=None): - """ - Copies file from remote computer to host. - Called from the worker thread of the `run` process. - - If md5sum is specified, the md5sum of the new file will be checked against - the old one. If it does not match, copied file will be deleted. 
- - Args: - old_path (string): - Path to file on remote computer - new_path (string): - Path to file on local computer - md5sum (string, optional): - md5sum of file on remote computer. - - Returns: - success (bool): - True if file copied successfully. False otherwise - """ - # Copies file over from computer - cmd = ["rsync"] - - fname = old_path - if self.host is not None: - fname = f'{self.host}:' + fname - if self.user is not None: - fname = f'{self.user}@' + fname - - cmd.append(fname) - - cmd.append(new_path) - - self.log.debug(f"Running: {' '.join(cmd)}") - - try: - subprocess.check_output(cmd) - except subprocess.CalledProcessError as e: - self.log.error("rsync call failed:\n{e}", e=e) - return False - - # Verify md5sum - new_md5 = get_md5sum(new_path) - if new_md5 != md5sum: - os.remove(new_path) - self.log.error("{file} copy failed. md5sums do not match.", file=old_path) - return False - - self.log.debug(f"Successfully copied {old_path} to {new_path}") - return True - - - def run(self, session, params=None): - """run() - - **Process** - Main run process. - - Each iteration this will query the pysmurf_files database to find any - uncopied files from specified targets, and attempts to copy them to the - local computer. On success, it'll update the path to the local path and - set `copied=1`. On failure, it'll increment the `failed_copy_attempts` - counter. 
- - """ - - self.running = True - session.set_status('running') - while self.running: - - with get_db_connection(**self.sql_config) as con: - cur = con.cursor(dictionary=True) - - query = """ - SELECT * FROM {} - WHERE copied=0 AND failed_copy_attempts<5 AND instance_id IN ({}) - """.format(pysmurf_table_name, ", ".join(["%s" for _ in self.targets])) - - cur.execute(query, self.targets) - - files = cur.fetchall() - if files: - self.log.debug(f"Found {len(files)} uncopied files.") - - for f in files: - new_path = create_local_path(f, self.data_dir) - - md5sum = binascii.hexlify(f['md5sum']).decode() - if self._copy_file(f['path'], new_path, md5sum=md5sum): - # If file copied successfully - self.log.debug("Successfully coppied file {}".format(f['path'])) - query = f""" - UPDATE {pysmurf_table_name} SET path=%s, copied=1 - WHERE id=%s - """ - cur.execute(query, (new_path, f['id'])) - else: - self.log.debug("Failed to copy {}".format(f['path'])) - - query = f""" - UPDATE {pysmurf_table_name} - SET failed_copy_attempts = failed_copy_attempts + 1 - WHERE id=%s - """ - cur.execute(query, (f['id'],)) - - con.commit() - - time.sleep(5) - - return True, "Stopped archiving data." 
- - def _stop(self, session, params=None): - """ Stopper for run process """ - session.set_status('stopping') - self.running = False - - -def make_parser(parser=None): - if parser is None: - parser = argparse.ArgumentParser() - - pgroup = parser.add_argument_group('Agent Options') - pgroup.add_argument('--data-dir', type=str, - help="Directory where pysmurf data should be copied") - pgroup.add_argument('--user', type=str, - help="User to connect to use with fsync") - pgroup.add_argument('--host', type=str, - help="host from which files should be copied") - pgroup.add_argument('--target', nargs="+", - help="instance-ids of pysmurf target(s) which should archived") - pgroup.add_argument('--mode', type=str, choices=['idle', 'init', 'run'], - help="Initial mode of agent.") - - return parser - - -if __name__ == '__main__': - parser = make_parser() - args = site_config.parse_args(agent_class='PysmurfArchiverAgent', - parser=parser) - - if args.target is None: - raise Exception("Argument --target is required") - - if type(args.target) == str: - args.target = [args.target] - agent, runner = ocs_agent.init_site_agent(args) - - archiver = PysmurfArchiverAgent(agent, data_dir=args.data_dir, - targets=args.target, - host=args.host) - - agent.register_process('run', archiver.run, archiver._stop, startup=True) - - runner.run(agent, auto_reconnect=True) diff --git a/agents/pysmurf_archiver/requirements.txt b/agents/pysmurf_archiver/requirements.txt deleted file mode 100644 index 29966a9a0..000000000 --- a/agents/pysmurf_archiver/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -mysql-connector>=2.1.6 diff --git a/agents/pysmurf_controller/Dockerfile b/agents/pysmurf_controller/Dockerfile index fdea27508..da8443ac4 100644 --- a/agents/pysmurf_controller/Dockerfile +++ b/agents/pysmurf_controller/Dockerfile @@ -1,11 +1,9 @@ -FROM simonsobs/sodetlib:v0.3.0-101-gccbc921 +FROM simonsobs/sodetlib:v0.4.0 WORKDIR /app # SOCS installation -RUN git clone https://github.com/simonsobs/socs.git \ 
- && pip3 install -r socs/requirements.txt \ - && pip3 install -e socs +RUN python3 -m pip install --src=/app/ -e git+https://github.com/simonsobs/socs.git#egg=socs ENV OCS_CONFIG_DIR /config diff --git a/agents/pysmurf_controller/pysmurf_controller.py b/agents/pysmurf_controller/pysmurf_controller.py index c04648b0a..85ee21410 100644 --- a/agents/pysmurf_controller/pysmurf_controller.py +++ b/agents/pysmurf_controller/pysmurf_controller.py @@ -1,21 +1,30 @@ from twisted.internet import reactor, protocol, threads from twisted.python.failure import Failure -from twisted.internet.error import ProcessDone, ProcessTerminated from twisted.internet.defer import inlineCallbacks, Deferred from autobahn.twisted.util import sleep as dsleep from twisted.logger import Logger, FileLogObserver +import matplotlib +matplotlib.use('Agg') +import sodetlib as sdl +from sodetlib.operations import ( + uxm_setup, uxm_relock, bias_steps, iv, bias_dets +) +from sodetlib.det_config import DetConfig +import numpy as np + import sys from typing import Optional import time import os import argparse -on_rtd = os.environ.get('READTHEDOCS') == 'True' -if not on_rtd: - from ocs import ocs_agent, site_config, ocs_twisted - from ocs.ocs_agent import log_formatter - from ocs.ocs_twisted import TimeoutLock +from ocs import ocs_agent, site_config +from ocs.ocs_agent import log_formatter +from ocs.ocs_twisted import TimeoutLock + + +NBIASLINES = 12 class PysmurfScriptProtocol(protocol.ProcessProtocol): @@ -86,18 +95,24 @@ class PysmurfController: txaio logger object created by agent prot (PysmurfScriptProtocol): protocol used to call and monitor external pysmurf scripts - protocol_lock (ocs.ocs_twisted.TimeoutLock): + lock (ocs.ocs_twisted.TimeoutLock): lock to protect multiple pysmurf scripts from running simultaneously. + slot (int): + ATCA Slot of the smurf-card this agent is commanding. 
""" def __init__(self, agent, args): self.agent: ocs_agent.OCSAgent = agent self.log = agent.log self.prot = None - self.protocol_lock = TimeoutLock() + self.lock = TimeoutLock() self.current_session = None + self.slot = args.slot + if self.slot is None: + self.slot = os.environ['SLOT'] + if args.monitor_id is not None: self.agent.subscribe_on_start( self._on_session_data, @@ -119,6 +134,19 @@ def _on_session_data(self, _data): if isinstance(data['payload'], str): self.current_session.add_message(data['payload']) + def _get_smurf_control(self, session=None, load_tune=True, **kwargs): + """ + Gets pysmurf and det-config instances for sodetlib functions. + """ + cfg = DetConfig() + cfg.load_config_files(slot=self.slot) + S = cfg.get_smurf_control(**kwargs) + if load_tune: + S.load_tune(cfg.dev.exp['tunefile']) + S._ocs_session = session + return S, cfg + + @inlineCallbacks def _run_script(self, script, args, log, session): """ @@ -136,10 +164,10 @@ def _run_script(self, script, args, log, session): or False to not log at all. 
""" - with self.protocol_lock.acquire_timeout(0, job=script) as acquired: + with self.lock.acquire_timeout(0, job=script) as acquired: if not acquired: return False, "The requested script cannot be run because " \ - "script {} is already running".format(self.protocol_lock.job) + "script {} is already running".format(self.lock.job) self.current_session = session try: @@ -209,7 +237,8 @@ def run(self, session, params=None): This would result in the following session.data object:: - >>> { + >>> response.session['data'] + { 'datafile': '/data/smurf_data/20200316/1584401673/outputs/1584402020.dat', 'active_channels': [0,1,2,3,4] } @@ -233,33 +262,506 @@ def abort(self, session, params=None): self.prot.transport.signalProcess('KILL') return True, "Aborting process" - @inlineCallbacks - def tune_squids(self, session, params=None): - """tune_squids(args=[], log=True) + @ocs_agent.param('poll_interval', type=float, default=10) + def check_state(self, session, params=None): + """check_state(poll_interval=10) + + **Process** - Continuously checks the current state of the smurf. This + will not modify the smurf state, so this task can be run in conjunction + with other smurf operations. This will continuously poll smurf metadata + and update the ``session.data`` object. + + Args + ----- + poll_interval : float + Time (sec) between updates. 
+ + Notes + ------- + The following data will be written to the session.data object:: + + >> response.session['data'] + { + 'channel_mask': Array of channels that are streaming data, + 'downsample_factor': downsample_factor, + 'agg_time': Buffer time per G3Frame (sec), + 'open_g3stream': True if data is currently streaming to G3, + 'pysmurf_action': Current pysmurf action, + 'pysmurf_action_timestamp': Current pysmurf-action timestamp, + 'stream_tag': stream-tag for the current g3 stream, + 'last_update': Time that session-data was last updated, + 'stream_id': Stream-id of the controlled smurf instance + } + """ + S, cfg = self._get_smurf_control(load_tune=False, no_dir=True) + reg = sdl.Registers(S) - **Task** - Runs the fake script /config/scripts/pysmurf/tune_squids.py + session.set_status('running') + kw = {'retry_on_fail': False} + while session.status in ['starting', 'running']: + try: + d = dict( + channel_mask = S.get_channel_mask(**kw).tolist(), + downsample_factor = S.get_downsample_factor(**kw), + agg_time = reg.agg_time.get(**kw), + open_g3stream = reg.open_g3stream.get(**kw), + pysmurf_action = reg.pysmurf_action.get(**kw, as_string=True), + pysmurf_action_timestamp = reg.pysmurf_action_timestamp.get(**kw), + stream_tag = reg.stream_tag.get(**kw, as_string=True), + last_update = time.time(), + stream_id = cfg.stream_id, + ) + session.data.update(d) + except RuntimeError: + self.log.warn("Could not connect to epics server! Waiting and " + "then trying again") + + time.sleep(params['poll_interval']) + + return True, "Finished checking state" + + def _stop_check_state(self, session, params): + """Stopper for check state process""" + session.set_status('stopping') + + @ocs_agent.param("duration", default=None, type=float) + @ocs_agent.param('kwargs', default=None) + @ocs_agent.param('load_tune', default=True, type=bool) + def stream(self, session, params): + """stream(duration=None) + + **Process** - Stream smurf data. 
If a duration is specified, stream + will end after that amount of time. If unspecified, the stream will run + until the stop function is called. + + Args + ----- + duration : float, optional + If set, determines how many seconds to stream data. By default, + will leave stream open until stop function is called. + kwargs : dict + A dictionary containing additional keyword arguments to pass + to sodetlib's ``stream_g3_on`` function + load_tune : bool + If true, will load a tune-file to the pysmurf object on + instantiation. + + Notes + ------ + The following data will be written to the session.data object:: + + >> response.session['data'] + { + 'stream_id': Stream-id for the slot, + 'sid': Session-id for the streaming session, + } + """ + if params['kwargs'] is None: + params['kwargs'] = {} - Args: - args (list, optional): - List of command line arguments to pass to the script. - Defaults to []. - log (string/bool, optional): - Determines if and how the process's stdout should be logged. - You can pass the path to a logfile, True to use the agent's log, - or False to not log at all. + with self.lock.acquire_timeout(0, job='stream') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." 
+ + S, cfg = self._get_smurf_control(session=session, + load_tune=params['load_tune']) + + stop_time = None + if params['duration'] is not None: + stop_time = time.time() + params['duration'] + + session.data['stream_id'] = cfg.stream_id + session.data['sid'] = sdl.stream_g3_on(S, **params['kwargs']) + session.set_status('running') + while session.status in ['starting', 'running']: + if stop_time is not None: + if time.time() > stop_time: + break + time.sleep(1) + sdl.stream_g3_off(S) + + return True, 'Finished streaming data' + + def _stream_stop(self, session, params=None): + session.set_status('stopping') + return True, "Requesting to end stream" + + @ocs_agent.param('bands', default=None) + @ocs_agent.param('kwargs', default=None) + def uxm_setup(self, session, params): + """uxm_setup(bands=None, kwargs=None) + + **Task** - Runs first-time setup procedure for a UXM. This will run the + following operations: + + 1. Setup Amps (~1 min) + 2. Estimate attens if attens are not already set in the device cfg + (~1 min / band) + 3. Estimate phase delay (~1 min / band) + 4. Setup tune (~7 min / band) + 5. Setup tracking param (~30s / band) + 6. Measure noise (~45s) + + See the `sodetlib setup docs + `_ + for more information on the sodetlib setup procedure and allowed + keyword arguments. + + Args + ----- + bands : list, int + Bands to set up. Defaults to all. + kwargs : dict + Dict containing additional keyword args to pass to the uxm_setup + function. + + Notes + ------- + SODETLIB functions such as ``uxm_setup`` and any other functions called + by ``uxm_setup`` will add relevant data to the ``session.data`` object + to a unique key. 
For example, if all is successful ``session.data`` + may look like:: + + >> response.session['data'] + { + 'timestamps': [('setup_amps', 1651162263.0204525), ...], + 'setup_amps_summary': { + 'success': True, + 'amp_50k_Id': 15.0, + 'amp_hemt_Id': 8.0, + 'amp_50k_Vg': -0.52, + 'amp_hemt_Vg': -0.829, + }, + 'setup_phase_delay': { + 'bands': [0, 1, ...] + 'band_delay_us': List of band delays + }, + 'noise': { + 'band_medians': List of median white noise for each band + } + } + """ + if params['kwargs'] is None: + params['kwargs'] = {} + with self.lock.acquire_timeout(0, job='uxm_setup') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." + + S, cfg = self._get_smurf_control(session=session) + session.set_status('running') + self.log.info("Starting UXM setup") + success, summary = uxm_setup.uxm_setup( + S, cfg, bands=params['bands'], **params['kwargs'] + ) + + if not success: + final_step = session.data['timestamps'][-1][0] + return False, f"Failed on step {final_step}" + else: + return True, "Completed full UXM-Setup procedure" + + @ocs_agent.param('bands', default=None) + @ocs_agent.param('kwargs', default=None) + def uxm_relock(self, session, params): + """uxm_relock(bands=None, kwargs=None) + + **Task** - Relocks detectors to existing tune if setup has already been + run. Runs the following operations: + + 1. Setup Amps (~1 min) + 2. Relocks detectors, setup notches (if requested), and serial + gradient descent / eta scan (~5 min / band) + 3. Tracking setup (~20s / band) + 4. Noise check (~45s) + + See the `sodetlib relock docs + `_ + for more information on the sodetlib relock procedure and allowed + keyword arguments. + + Args + ----- + bands : list, int + Bands to set up. Defaults to all. + kwargs : dict + Dict containing additional keyword args to pass to the uxm_relock + function. 
+ + Notes + ------- + SODETLIB functions such as ``uxm_relock`` and any other functions called + will add relevant data to the ``session.data`` object to a unique key. + For example, if all is successful ``session.data`` may look like:: + + >> response.session['data'] + { + 'timestamps': [('setup_amps', 1651162263.0204525), ...], + 'setup_amps_summary': { + 'success': True, + 'amp_50k_Id': 15.0, + 'amp_hemt_Id': 8.0, + 'amp_50k_Vg': -0.52, + 'amp_hemt_Vg': -0.829, + }, + 'noise': { + 'band_medians': List of median white noise for each band + } + } """ - if params is None: - params = {} + if params['kwargs'] is None: + params['kwargs'] = {} - ok, msg = yield self._run_script( - '/config/scripts/pysmurf/tune_squids.py', - params.get('args', []), - params.get('log', True), - session - ) + with self.lock.acquire_timeout(0, job='uxm_relock') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." - return ok, msg + session.set_status('running') + S, cfg = self._get_smurf_control(session=session) + success, summary = uxm_relock.uxm_relock( + S, cfg, bands=params['bands'], **params['kwargs'] + ) + return success, "Finished UXM Relock" + + @ocs_agent.param('duration', default=30., type=float) + @ocs_agent.param('kwargs', default=None) + def take_noise(self, session, params): + """take_noise(duration=30., kwargs=None) + + **Task** - Takes a short timestream and calculates noise statistics. + Median white noise level for each band will be stored in the session + data. See the `sodetlib noise docs + `_ for more + information on the noise function and possible keyword arguments. + + Args + ----- + duration : float + Duration of timestream to take for noise calculation. + kwargs : dict + Dict containing additional keyword args to pass to the take_noise + function. 
+ + Notes + ------- + Median white noise levels for each band will be stored in the + session.data object, for example:: + + >> response.session['data'] + { + 'noise': { + 'band_medians': List of median white noise for each band + } + } + """ + if params['kwargs'] is None: + params['kwargs'] = {} + + with self.lock.acquire_timeout(0, job='take_noise') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." + + session.set_status('running') + S, cfg = self._get_smurf_control(session=session) + sdl.noise.take_noise(S, cfg, params['duration'], **params['kwargs']) + return True, "Finished taking noise" + + @ocs_agent.param('kwargs', default=None) + def take_bgmap(self, session, params): + """take_bgmap(kwargs=None) + + **Task** - Takes a bias-group map. This will calculate the number of + channels assigned to each bias group and put that into the session data + object along with the filepath to the analyzed bias-step output. See + the `bias steps docs page `_ + for more information on what additional keyword arguments can be + passed. + + Args + ---- + kwargs : dict + Additional kwargs to pass to take_bgmap function. + + Notes + ------ + The filepath of the BiasStepAnalysis object and the number of channels + assigned to each bias group will be written to the session.data + object:: + + >> response.session['data'] + { + 'nchans_per_bg': [123, 183, 0, 87, ...], + 'filepath': /path/to/bias_step_file/on/smurf_server.npy, + } + + """ + if params['kwargs'] is None: + params['kwargs'] = {} + + with self.lock.acquire_timeout(0, job='take_bgmap') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." 
+ + session.set_status('running') + S, cfg = self._get_smurf_control(session=session) + bsa = bias_steps.take_bgmap(S, cfg, **params['kwargs']) + nchans_per_bg = [0 for _ in range(NBIASLINES + 1)] + for bg in range(NBIASLINES): + nchans_per_bg[bg] = int(np.sum(bsa.bgmap == bg)) + nchans_per_bg[-1] = int(np.sum(bsa.bgmap == -1)) + + session.data = { + 'nchans_per_bg': nchans_per_bg, + 'filepath': bsa.filepath, + } + return True, "Finished taking bgmap" + + @ocs_agent.param('kwargs', default=None) + def take_iv(self, session, params): + """take_iv(kwargs=None) + + **Task** - Takes an IV. This will add the normal resistance array and + channel info to the session data object along with the analyzed IV + filepath. See the `sodetlib IV docs page + `_ + for more information on what additional keyword arguments can be passed + in. + + Args + ---- + kwargs : dict + Additional kwargs to pass to the ``take_iv`` function. + + Notes + ------ + The following data will be written to the session.data object:: + + >> response.session['data'] + { + 'bands': Bands number of each resonator + 'channels': Channel number of each resonator + 'bgmap': BGMap assignment for each resonator + 'R_n': Normal resistance for each resonator + 'filepath': Filepath of saved IVAnalysis object + } + """ + if params['kwargs'] is None: + params['kwargs'] = {} + + with self.lock.acquire_timeout(0, job='take_iv') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." 
+ + session.set_status('starting') + S, cfg = self._get_smurf_control() + iva = iv.take_iv(S, cfg, **params['kwargs']) + session.data = { + 'bands': iva.bands.tolist(), + 'channels': iva.channels.tolist(), + 'bgmap': iva.bgmap.tolist(), + 'R_n': iva.R_n.tolist(), + 'filepath': iva.filepath, + } + return True, "Finished taking IV" + + @ocs_agent.param('kwargs', default=None) + def take_bias_steps(self, session, params): + """take_bias_steps(kwargs=None) + + **Task** - Takes bias_steps and saves the output filepath to the + session data object. See the `sodetlib bias step docs page + `_ + for more information on bias steps and what kwargs can be passed in. + + Args + ---- + kwargs : dict + Additional kwargs to pass to ``take_bais_steps`` function. + + Notes + ------ + The following data will be written to the session.data object:: + + >> response.session['data'] + { + 'filepath': Filepath of saved BiasStepAnalysis object + } + """ + + if params['kwargs'] is None: + params['kwargs'] = {} + + with self.lock.acquire_timeout(0, job='bias_steps') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." + + session.set_status('starting') + S, cfg = self._get_smurf_control(session=session) + bsa = bias_steps.take_bias_steps( + S, cfg, **params['kwargs'] + ) + session.data = { + 'filepath': bsa.filepath + } + + return True, "Finished taking bias steps" + + @ocs_agent.param('rfrac', default=(0.3, 0.6)) + @ocs_agent.param('kwargs', default=None) + def bias_dets(self, session, params): + """bias_dets(rfrac=(0.3, 0.6), kwargs=None) + + **Task** - Biases detectors to a target Rfrac value or range. This + function uses IV results to determine voltages for each bias-group. If + rfrac is set to be a value, the bias voltage will be set such that the + median rfrac across all channels is as close as possible to the set + value. If a range is specified, the voltage will be chosen to maximize + the number of channels in that range. 
+ + See the sodetlib docs page for `biasing dets into transition + `_ + for more information on the functions and additional keyword args that + can be passed in. + + Args + ------- + rfrac : float, tuple + Target rfrac range to aim for. If this is a float, bias voltages + will be chosen to get the median rfrac of each bias group as close + as possible to that value. If + kwargs : dict + Additional kwargs to pass to the ``bias_dets`` function. + + Notes + ------ + The following data will be written to the session.data object:: + + >> response.session['data'] + { + 'biases': List of voltage bias values for each bias-group + } + """ + if params['kwargs'] is None: + params['kwargs'] = {} + + with self.lock.acquire_timeout(0, job='bias_steps') as acquired: + if not acquired: + return False, f"Operation failed: {self.lock.job} is running." + + session.set_status('running') + S, cfg = self._get_smurf_control(session=session) + if isinstance(params['rfrac'], (int, float)): + biases = bias_dets.bias_to_rfrac( + S, cfg, rfrac=params['rfrac'], **params['kwargs'] + ) + else: + biases = bias_dets.bias_to_rfrac( + S, cfg, rfrac_range=params['rfrac'], **params['kwargs'] + ) + + session.data['biases'] = biases.tolist() + + return True, "Finished taking bias steps" def make_parser(parser=None): @@ -273,10 +775,12 @@ def make_parser(parser=None): pgroup.add_argument('--monitor-id', '-m', type=str, help="Instance id for pysmurf-monitor corresponding to " "this pysmurf instance.") - + pgroup.add_argument('--slot', type=int, + help="Smurf slot that this agent will be controlling") + pgroup.add_argument('--poll-interval', type=float, + help="Time between check-state polls") return parser - if __name__ == '__main__': parser = make_parser() args = site_config.parse_args(agent_class='PysmurfController', parser=parser) @@ -286,6 +790,22 @@ def make_parser(parser=None): agent.register_task('run', controller.run, blocking=False) agent.register_task('abort', controller.abort, blocking=False) 
- agent.register_task('tune_squids', controller.tune_squids, blocking=False) + + startup_pars = {} + if args.poll_interval is not None: + startup_pars['poll_interval'] = args.poll_interval + agent.register_process( + 'check_state', controller.check_state, controller._stop_check_state, + startup=startup_pars + ) + agent.register_process( + 'stream', controller.stream, controller._stream_stop + ) + agent.register_task('uxm_setup', controller.uxm_setup) + agent.register_task('uxm_relock', controller.uxm_relock) + agent.register_task('take_bgmap', controller.take_bgmap) + agent.register_task('take_iv', controller.take_iv) + agent.register_task('take_bias_steps', controller.take_bias_steps) + agent.register_task('take_noise', controller.take_noise) runner.run(agent, auto_reconnect=True) diff --git a/agents/pysmurf_controller/requirements.txt b/agents/pysmurf_controller/requirements.txt index ebd282170..1edd89c5b 100644 --- a/agents/pysmurf_controller/requirements.txt +++ b/agents/pysmurf_controller/requirements.txt @@ -1,2 +1 @@ -mysql-connector>=2.1.6 dumb-init diff --git a/agents/pysmurf_monitor/pysmurf_monitor.py b/agents/pysmurf_monitor/pysmurf_monitor.py index 5c767351d..1da097d48 100644 --- a/agents/pysmurf_monitor/pysmurf_monitor.py +++ b/agents/pysmurf_monitor/pysmurf_monitor.py @@ -172,6 +172,8 @@ def run(self, session, params=None): # archive_name to "timestreams" archive_name = meta.get('archive_name', 'smurf') try: + if (meta.get('format') == 'npy') and (not meta['path'].endswith('.npy')): + meta['path'] += '.npy' local_path = meta['path'] remote_path = create_remote_path(meta, archive_name) diff --git a/agents/pysmurf_monitor/requirements.txt b/agents/pysmurf_monitor/requirements.txt index 7c3764864..09cf856af 100644 --- a/agents/pysmurf_monitor/requirements.txt +++ b/agents/pysmurf_monitor/requirements.txt @@ -1,2 +1 @@ -mysql-connector>=2.1.6 sqlalchemy >= 1.4.0 diff --git a/agents/scpi_psu/scpi_psu_agent.py b/agents/scpi_psu/scpi_psu_agent.py index 
e747cc657..2fbbcdb1a 100644 --- a/agents/scpi_psu/scpi_psu_agent.py +++ b/agents/scpi_psu/scpi_psu_agent.py @@ -57,11 +57,14 @@ def monitor_output(self, session, params=None): Args: wait (float, optional): time to wait between measurements [seconds]. + channels (list[int], optional): + channels to monitor. [1, 2, 3] by default. """ if params is None: params = {} wait_time = params.get('wait', 1) + channels = params.get('channels', [1, 2, 3]) self.monitor = True @@ -74,7 +77,7 @@ def monitor_output(self, session, params=None): 'data': {} } - for chan in [1, 2, 3]: + for chan in channels: data['data']["Voltage_{}".format(chan)] = self.psu.get_volt(chan) data['data']["Current_{}".format(chan)] = self.psu.get_curr(chan) diff --git a/agents/smurf/Smurf_Agent.py b/agents/smurf/Smurf_Agent.py deleted file mode 100644 index 56b5890f7..000000000 --- a/agents/smurf/Smurf_Agent.py +++ /dev/null @@ -1,152 +0,0 @@ -from ocs import ocs_agent -from ocs.Lakeshore.Lakeshore240 import Module -import random -import time -import threading -import pyrogue as pr -from FpgaTopLevel import * - - -class Smurf_Agent: - - def __init__(self, agent): - self.agent = agent - self.lock = threading.Semaphore() - self.job = None - - self.args = { - "simGui": False, - "commType": "eth-rssi-interleaved", - "ipAddr": "192.168.2.20", - "slot": 2, - "pollEn": True - } - self.base = None - - def close(self): - print("Closing connection") - if self.base is not None: - self.base.stop() - - def try_set_job(self, job_name): - with self.lock: - if self.job == None: - self.job = job_name - return True, 'ok.' - return False, 'Conflict: "%s" is already running.' 
% self.job - - def set_job_done(self): - with self.lock: - self.job = None - - #Init smurf task - def init_hardware(self, session, params=None): - - self.base = pr.Root(name='AMCc', description='') - - self.base.add(FpgaTopLevel( - simGui = self.args["simGui"], - commType = self.args['commType'], - ipAddr = self.args['ipAddr'], - pcieRssiLink = (int(self.args['slot']) - 2) - )) - - streamDataWriter = pr.utilities.fileio.StreamWriter(name="streamDataWriter") - self.base.add(streamDataWriter) - - pr.streamConnect(self.base.FpgaTopLevel.stream.application(0xC1), self.base.streamDataWriter.getChannel(0)) - - self.base.start(pollEn = self.args['pollEn']) - - return True, "Hardware Initialized" - - #Load config file - def read_config(self, session, params={}): - """ - Loads configuration file - - params: - - filename: path to the config file - """ - filename = params.get("filename") - - if filename is None: - print("filename must be specified in params. File not loaded.") - return False, "Correct params not specified" - - self.base.ReadConfig(filename) - - return True, "Loaded config file {}".format(filename) - - def node_from_path(self, path): - """ - Returns a pyrogue node from a given path. 
- - params: - - path: A list containing the sequence of node names - """ - cur_node = self.base - for node in path: - next_node = cur_node.nodes.get(node) - if next_node is None: - raise ValueError("{} has no child {}".format(cur_node.name, node)) - cur_node = next_node - - return cur_node - - - def set_variable(self, session, params={}): - """ - Sets variable - params: - - path: Path to the variable in the form of a list of node names - - value: value to set - - write: write - """ - print(params) - path = params.get('path') - value = params.get('value') - write = params.get('write', True) - - if path is None or value is None: - raise ValueError("Must specify path and value in params") - - variable = self.node_from_path(path) - variable.set(value, write=write) - - return True, "Varibale {} set to {}".format(variable.name, value) - - def add_listener(self, session, params={}): - """ - Adds listener to a variable - params: - - path: Path to the variable in the form of a list of node names - - callback: Function to be called when variable is updated - Callback must have the form: f(var, value, disp) - """ - path = params.get('path') - # callback = params.get('callback') - def callback(var, value, disp): - print("Variable {} has been updated to {}".format(var, disp)) - - if path is None or callback is None: - raise ValueError("Must specify path and callback in params") - - variable = self.node_from_path(path) - variable.addListener(callback) - - return True, "Added listener to {}".format(variable.name) - - -if __name__ == '__main__': - agent, runner = ocs_agent.init_ocs_agent('observatory.smurf') - - smurf = Smurf_Agent(agent) - - agent.register_task('init_hardware', smurf.init_hardware) - agent.register_task('read_config', smurf.read_config) - agent.register_task('set_variable', smurf.set_variable) - agent.register_task('add_listener', smurf.add_listener) - - runner.run(agent, auto_reconnect=True) - smurf.close() diff --git a/agents/smurf/ocs.cfg 
b/agents/smurf/ocs.cfg deleted file mode 100644 index 61e0d2177..000000000 --- a/agents/smurf/ocs.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[default] -wamp_server = ws://localhost:8001/ws -wamp_realm = realm1 - diff --git a/agents/smurf/stream_simulator.py b/agents/smurf/stream_simulator.py deleted file mode 100644 index 20ce51fe7..000000000 --- a/agents/smurf/stream_simulator.py +++ /dev/null @@ -1,118 +0,0 @@ -import argparse - -from ocs import ocs_agent, site_config -from spt3g import core -import numpy as np -import time -import copy - -class G3StreamSimulator: - def __init__(self, agent, port=4536): - """ - OCS Agent to simulate data streaming without connection to a smurf. - """ - self.port = port - self.agent = agent - self.log = agent.log - self.is_streaming = False - self.freq = 100 # [Hz] - - self.writer = core.G3NetworkSender(hostname="*", port=self.port) - - # This does not close the port when streaming stops! - # Only when the agent is quit! - self.keys = ["{}".format(i) for i in range(4096)] - self.channels = { - k: { - 'type': 'const', - 'val': 0, - 'stdev': 0.05, - } - for k in self.keys - } - - def set_channel(self, session, params=None): - if params is None: - params = {} - - cids = params['channels'] - cids = cids if type(cids) == list else [cids] - - fparams = params['func_params'] - fparams = fparams if type(fparams) == list else [fparams] - - for i in range(len(cids)): - self.channels[cids[i]] = fparams[i] - - return True, "Set Channels {}".format(cids) - - def start_stream(self, session, params=None): - - if params is None: - params = {} - - delay = params.get('delay', 1) - ts_len = params.get('ts_len', 100) - - # Writes status frame - f = core.G3Frame(core.G3FrameType.Housekeeping) - f['session_id'] = 0 - f['start_time'] = time.time() - self.writer.Process(f) - - self.is_streaming = True - frame_num = 0 - while self.is_streaming: - - f = core.G3Frame(core.G3FrameType.Scan) - - t1 = time.time() - t0 = t1 - delay - - ts = np.arange(t0, t1, 
1/self.freq) - - f['session_id'] = 0 - f['frame_num'] = frame_num - f['data'] = core.G3TimestreamMap() - - for k,c in self.channels.items(): - - fparams = copy.copy(c) - bg = np.random.normal(0, fparams.get('stdev', 0), len(ts)) - if fparams['type'] == 'const': - xs = bg + fparams['val'] - elif fparams['type'] in ['lin', 'linear']: - xs = bg + ts * fparams['slope'] + fparams.get('offset', 0) - # Wraps from -pi to pi - xs = np.mod(xs + np.pi, 2 * np.pi) - np.pi - - f['data'][k] = core.G3Timestream(xs) - f['data'][k].start = core.G3Time(t0 * core.G3Units.sec) - f['data'][k].stop = core.G3Time(t1 * core.G3Units.sec) - - self.log.info("Writing G3 Frame") - self.writer.Process(f) - frame_num += 1 - time.sleep(delay) - print("Writing EndProcessingFrame") - f = core.G3Frame(core.G3FrameType.EndProcessing) - self.writer.Process(f) - return True, "Finished streaming" - - def stop_stream(self, session, params=None): - self.is_streaming = False - return True, "Stopping stream" - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - args = site_config.parse_args(agent_class='G3StreamSimulator', - parser=parser) - - agent, runner = ocs_agent.init_site_agent(args) - ss = G3StreamSimulator(agent, port=int(args.port)) - - agent.register_task('set', ss.set_channel) - agent.register_process('stream', ss.start_stream, ss.stop_stream) - - runner.run(agent, auto_reconnect=True) diff --git a/agents/smurf_file_emulator/smurf_file_emulator.py b/agents/smurf_file_emulator/smurf_file_emulator.py index 560fb8502..2c17ec7cf 100644 --- a/agents/smurf_file_emulator/smurf_file_emulator.py +++ b/agents/smurf_file_emulator/smurf_file_emulator.py @@ -2,9 +2,92 @@ import os import argparse import txaio +import numpy as np from ocs import ocs_agent, site_config +import yaml +import so3g +from spt3g import core + + +def get_smurf_status(): + """Loads a sample status dict from file""" + status_file = os.path.join(os.path.split(__file__)[0], 'status_sample.yaml') + with open(status_file, 
'r') as f: + return yaml.safe_load(f) + + +SOSTREAM_VERSION = 2 +NBIASLINES = 16 +class G3FrameGenerator: + """ + Helper class for generating G3 Streams. + """ + def __init__(self, stream_id, sample_rate, nchans): + self.frame_num = 0 + self.session_id = int(time.time()) + self.nchans = nchans + self.sample_rate = sample_rate + self.stream_id = stream_id + + def tag_frame(self, fr): + fr['frame_num'] = self.frame_num + fr['session_id'] = self.session_id + fr['sostream_id'] = self.stream_id + fr['sostream_version'] = SOSTREAM_VERSION + fr['time'] = core.G3Time(time.time() * core.G3Units.s) + self.frame_num += 1 + return fr + + def get_obs_frame(self): + fr = core.G3Frame(core.G3FrameType.Observation) + self.tag_frame(fr) + return fr + + def get_status_frame(self): + fr = core.G3Frame(core.G3FrameType.Wiring) + fr['status'] = yaml.dump(get_smurf_status()) + self.tag_frame(fr) + return fr + + def get_data_frame(self, start, stop): + + times = np.arange(start, stop, 1./self.sample_rate) + nsamps = len(times) + chans = np.arange(self.nchans) + names = [f'r{ch:0>4}' for ch in chans] + + count_per_phi0 = 2**16 + data = np.zeros((self.nchans, nsamps), dtype=np.int32) + data += count_per_phi0 * chans[:, None] + data += (count_per_phi0 * 0.2 * np.sin(2 * np.pi * 8 * times)).astype(int) + data += (count_per_phi0 * np.random.normal(0, 0.03, (self.nchans, nsamps))).astype(int) + + fr = core.G3Frame(core.G3FrameType.Scan) + + g3times = core.G3VectorTime(times * core.G3Units.s) + fr['data'] = so3g.G3SuperTimestream(names, g3times, data) + + primary_names = [ + 'UnixTime', 'FluxRampIncrement', 'FluxRampOffset', 'Counter0', + 'Counter1', 'Counter2', 'AveragingResetBits', 'FrameCounter', + 'TESRelaySetting' + ] + primary_data = np.zeros((len(primary_names), nsamps), dtype=np.int64) + primary_data[0, :] = (times * 1e9).astype(int) + fr['primary'] = so3g.G3SuperTimestream(primary_names, g3times, primary_data) + + tes_bias_names = [f'bias{bg:0>2}' for bg in range(NBIASLINES)] + 
bias_data = np.zeros((NBIASLINES, nsamps), dtype=np.int32) + fr['tes_biases'] = so3g.G3SuperTimestream(tes_bias_names, g3times, bias_data) + + fr['timing_paradigm'] = 'Low Precision' + fr['num_samples'] = nsamps + + self.tag_frame(fr) + return fr + class SmurfFileEmulator: """ @@ -20,32 +103,22 @@ def __init__(self, agent, args): self.smurfdir = os.path.join(self.basedir, 'smurf') self.timestreamdir = os.path.join(self.basedir, 'timestreams') + self.nchans = args.nchans + self.sample_rate = args.sample_rate + self.frame_len = args.frame_len self.streaming = False - def _write_g3_file(self, session_id=None, seq=0, start=None, stop=None): + def _get_g3_filename(self, session_id, seq, makedirs=True): """ - Writes fake G3 timestream file. + Returns the file path for a g3-file with specified session id and seq + idx. """ - if session_id is None: - session_id = int(time.time()) - if start is None: - start = time.time() - if stop is None: - stop = time.time() - timecode = f"{session_id}"[:5] subdir = os.path.join(self.timestreamdir, timecode, self.stream_id) - os.makedirs(subdir, exist_ok=True) filepath = os.path.join(subdir, f"{session_id}_{seq:0>3}.g3") - self.log.info(f"Writing file {filepath}") - with open(filepath, 'w') as f: - f.write(f'start: {start}\n') - f.write(f"stop: {stop}\n") return filepath - - def _write_smurf_file(self, name, action, action_time=None, prepend_ctime=True, is_plot=False): """ @@ -92,7 +165,8 @@ def _write_smurf_file(self, name, action, action_time=None, f.write(f'start: {time.time()}\n') return filepath - def tune_dets(self, session, params=None): + @ocs_agent.param('test_mode', type=bool, default=False) + def tune_dets(self, session, params): """tune_dets() **Task** - Emulates files that might come from a general tune dets @@ -103,6 +177,12 @@ def tune_dets(self, session, params=None): 2. setup_notches 3. tracking_setup 4. 
short g3 stream + + Args + ---- + test_mode : bool + If True, will skip any wait times associated with writing + g3 files. """ # Find Freq action_time = time.time() @@ -125,7 +205,26 @@ def tune_dets(self, session, params=None): self._write_smurf_file(fname, 'tracking_setup', prepend_ctime=False) # Short g3 stream - self._write_g3_file() + frame_gen = G3FrameGenerator( + self.stream_id, self.sample_rate, self.nchans + ) + fname = self._get_g3_filename(frame_gen.session_id, 0, makedirs=True) + os.makedirs(os.path.dirname(fname), exist_ok=True) + session.data['noise_file'] = fname + writer = core.G3Writer(fname) + writer(frame_gen.get_obs_frame()) + writer(frame_gen.get_status_frame()) + start = time.time() + stop = start + 30 + frame_starts = np.arange(start, stop, self.frame_len) + frame_stops = frame_starts + self.frame_len + for t0, t1 in zip(frame_starts, frame_stops): + if not params['test_mode']: + now = time.time() + if now < t1: + time.sleep(t1 - now) + writer(frame_gen.get_data_frame(t0, t1)) + writer(core.G3Frame(core.G3FrameType.EndProcessing)) return True, "Wrote tune files" @@ -161,7 +260,8 @@ def bias_dets(self, session, params=None): """ return True, 'Wrote det biasing files' - def stream(self, session, params=None): + @ocs_agent.param('duration', default=None) + def stream(self, session, params): """stream(duration=None) **Process** - Generates example fake-files organized in the same way as @@ -176,6 +276,8 @@ def stream(self, session, params=None): If set, will stop stream after specified amount of time (sec). 
""" session.set_status('starting') + + # Write initial smurf metadata action_time = time.time() files = ['freq.txt', 'mask.txt'] for f in files: @@ -186,41 +288,46 @@ def stream(self, session, params=None): if params.get('duration') is not None: end_time = time.time() + params['duration'] - self.streaming = True session.set_status('running') - seq = 0 - sid = int(time.time()) - g3_file_info = None + frame_gen = G3FrameGenerator( + self.stream_id, self.sample_rate, self.nchans + ) + session.data['session_id'] = frame_gen.session_id + session.data['g3_files'] = [] - file_start = None + seq = 0 + fname = self._get_g3_filename(frame_gen.session_id, seq, makedirs=True) + os.makedirs(os.path.dirname(fname), exist_ok=True) + session.data['g3_files'].append(fname) + writer = core.G3Writer(fname) + file_start = time.time() + + writer(frame_gen.get_obs_frame()) + writer(frame_gen.get_status_frame()) + self.streaming = True while self.streaming: - if g3_file_info is None: - file_start = time.time() - g3_file_info = { - 'start': file_start, - 'seq': seq, - 'session_id': sid - } - - time.sleep(1) + start = time.time() + time.sleep(self.frame_len) + stop = time.time() + writer(frame_gen.get_data_frame(start, stop)) if end_time is not None: - if time.time() > end_time: - g3_file_info['stop'] = time.time() - self._write_g3_file(**g3_file_info) + if stop > end_time: break if time.time() - file_start > self.file_duration: - g3_file_info['stop'] = time.time() - self._write_g3_file(**g3_file_info) - g3_file_info = None + writer(core.G3Frame(core.G3FrameType.EndProcessing)) seq += 1 + fname = self._get_g3_filename( + frame_gen.session_id, seq, makedirs=True + ) + os.makedirs(os.path.dirname(fname), exist_ok=True) + session.data['g3_files'].append(fname) + writer = core.G3Writer(fname) + file_start = time.time() - if g3_file_info is not None: - # Write out unwritten file info on stop - g3_file_info['stop'] = time.time() - self._write_g3_file(**g3_file_info) + 
writer(core.G3Frame(core.G3FrameType.EndProcessing)) return True, "Finished stream" @@ -242,8 +349,15 @@ def make_parser(parser=None): help='Stream ID for fake smurf stream') pgroup.add_argument('--base-dir', required=True, help="Base directory where data should be written") - pgroup.add_argument('--file-duration', default=10*60, type=int, + pgroup.add_argument('--file-duration', default=10*60, type=float, help="Time in sec before rotating g3 files") + pgroup.add_argument('--nchans', default=1024, type=int, + help="Number of channels to stream from") + pgroup.add_argument('--sample-rate', default=200, type=float, + help="Sample rate for streaming data") + pgroup.add_argument('--frame-len', default=2, type=float, + help="Time per G3 data frame (seconds)") + return parser diff --git a/agents/smurf_file_emulator/status_sample.yaml b/agents/smurf_file_emulator/status_sample.yaml new file mode 100644 index 000000000..5e766c7af --- /dev/null +++ b/agents/smurf_file_emulator/status_sample.yaml @@ -0,0 +1,1240 @@ +AMCc.FpgaTopLevel.AmcCarrierCore.AxiSysMonUltraScale.Temperature: 74.444 +AMCc.FpgaTopLevel.AppTop.AppCore.RtmCryoDet.EnableRampTrigger: 1 +AMCc.FpgaTopLevel.AppTop.AppCore.RtmCryoDet.RampMaxCnt: 76799 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].CryoChannels.amplitudeScaleArray: '[0, + 0, 12, 0, 0, 0, 0, 12, 0, 0, 12, 12, 12, 0, 0, 12, 12, 0, 0, 12, 12, 12, 0, 0, 12, + 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 12, + 0, 12, 0, 0, 12, 0, 12, 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 12, + 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 12, 12, 12, 0, 0, 12, 12, 0, 12, 12, + 0, 12, 0, 12, 12, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 12, + 0, 0, 0, 0, 12, 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, 12, 12, 0, 0, 12, + 12, 0, 12, 12, 0, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 12, 0, 12, + 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, 12, 12, 0, 12, + 0, 0, 12, 
12, 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 12, 0, 12, 0, 0, 12, + 0, 12, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, + 12, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, + 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 0, 12, 0, 12, + 0, 0, 12, 12, 12, 0, 0, 0, 12, 0, 0, 12, 0, 12, 0, 12, 12, 0, 12, 0, 0, 12, 0, 12, + 0, 12, 0, 0, 12, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, + 12, 0, 12, 12, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, + 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, 0, 12, + 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, + 12, 12, 0, 0, 0, 12, 0, 12, 0, 0, 12, 12, 0, 12, 0, 12, 12, 0, 0, 12, 0, 12, 0, + 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 12, 0, 0, 0, 0, 12, + 12, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, + 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 12, + 0, 12, 0, 12, 12, 0, 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].CryoChannels.centerFrequencyArray: '[0.0, + 0.0, 0.41600918769836426, 0.0, 0.0, 0.0, 0.0, 0.39798932075500487, 0.0, 0.0, -0.527997064590454, + -0.23404197692871093, 0.18198165893554688, 0.0, 0.0, 0.3979804515838623, 0.3439767837524414, + 0.0, 0.0, -0.4779953956604004, -0.47182030677795406, 0.435911750793457, 0.0, 0.0, + -0.2820028781890869, 0.0, 0.0, -0.0040114402770996095, 0.0, 0.10596599578857421, + 0.0, 0.049967622756958006, 0.0, 0.0, 0.0, -0.3920229434967041, -0.2900084495544433, + 0.30597195625305174, 0.0, 0.3799870491027832, 0.0, 0.0, 0.0, 0.0, 0.35399837493896485, + 0.0, 0.0, 0.0, -0.3500218391418457, 0.0, -0.1920259952545166, 0.0, 0.0, -0.255979585647583, + 0.0, -0.386041259765625, -0.3720402717590332, 0.0, 0.0, 0.07998533248901367, 0.0, + -0.03999323844909668, 0.0, -0.4559892654418945, 0.0, 0.0, 0.0, 0.3299457550048828, + 
0.0, 0.0, 0.0, -0.511981201171875, 0.4100177764892578, 0.0, 0.0, 0.12993993759155273, + 0.35402483940124513, 0.0, 0.0, 0.0, 0.0, 0.0, -0.37799706459045407, 0.0, 0.0, -0.33803844451904297, + 0.0, -0.06601710319519043, -0.4519964218139648, -0.20799622535705567, 0.0, 0.0, + 0.08403496742248535, 0.07598404884338379, 0.0, -0.19201970100402832, -0.5300195217132568, + 0.0, -0.41402292251586914, 0.0, -0.3280059814453125, 0.4039785861968994, 0.0, 0.11600275039672851, + 0.0, 0.0, 0.0, 0.2399911880493164, 0.19397306442260742, 0.3679776191711426, 0.0, + 0.0, -0.45401573181152344, 0.0, -0.13002047538757325, 0.0, -0.01403374671936035, + 0.0, 0.0, -0.4380038738250732, 0.0, 0.0, 0.0, 0.0, 0.531992769241333, 0.5699682712554931, + 0.0, 0.0, 0.256030797958374, 0.0, 0.38199620246887206, 0.0, 0.2179478645324707, + 0.0, 0.0, 0.23995800018310545, 0.0, 0.0, 0.42601833343505857, -0.2620379447937012, + -0.38005499839782714, 0.0, 0.0, -0.21599879264831542, -0.42401690483093263, 0.0, + 0.5799973011016846, 0.04397792816162109, 0.0, 0.3039729595184326, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.35591640472412106, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.24404540061950683, + 0.1739563465118408, 0.3940157890319824, -0.4660473346710205, 0.0, -0.3159891128540039, + -0.03402414321899414, 0.0, 0.0, -0.3979461193084717, 0.560006046295166, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.2599966049194336, 0.0, 0.0, 0.0, 0.0, -0.11197843551635742, 0.0, + -0.4821324348449707, 0.0, 0.09194798469543457, 0.034014129638671876, 0.36362485885620116, + 0.0, 0.1439864158630371, 0.0, 0.0, 0.09798974990844726, 0.1599642276763916, 0.507973051071167, + 0.0, 0.0, -0.33998694419860837, 0.19198894500732422, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.153934907913208, -0.2940542221069336, 0.0, 0.4139841556549072, 0.0, -0.4900510311126709, + 0.0, 0.0, 0.22395672798156738, 0.0, 0.5839110374450683, 0.0, 0.0, 0.0, -0.5180488586425781, + 0.0, 0.39201951026916504, -0.16202588081359862, 0.0, 0.0, 0.21818246841430664, 0.5539522647857665, + 0.0, 0.0, 0.0, 0.4260051727294922, 0.0, 
0.0, 0.0, -0.11602206230163574, -0.4580379009246826, + 0.0, 0.0, 0.07601122856140137, 0.0, 0.24599647521972656, 0.0, 0.0, 0.0, 0.0, -0.39600248336791993, + 0.0, 0.061977910995483394, 0.0, -0.09601707458496093, -0.002009010314941406, 0.0, + 0.0, 0.0, 0.26806111335754396, 0.0, 0.0, 0.0, -0.3219898223876953, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.05598692893981933, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.1679893970489502, + -0.2741075992584228, 0.0, 0.0, 0.0, -0.5059749126434326, -0.3879531383514404, 0.0, + 0.10793981552124023, 0.0, 0.37561326026916503, 0.0, 0.496058177947998, 0.0, 0.0, + -0.3340015411376953, -0.3219964027404785, 0.18799524307250975, 0.0, 0.0, 0.0, -0.33399338722229005, + 0.0, 0.0, -0.10005855560302734, 0.0, 0.1839308738708496, 0.0, -0.3659667491912842, + 0.14398484230041503, 0.0, -0.10597729682922363, 0.0, 0.0, -0.2959871292114258, 0.0, + -0.32007765769958496, 0.0, -0.07815914154052735, 0.0, 0.0, -0.3500071048736572, + 0.3479508876800537, 0.0, -0.5500076293945312, 0.0, 0.0, -0.5580103397369385, 0.0, + -0.13203377723693846, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5640007495880127, 0.5759814262390136, + 0.02195448875427246, 0.0, 0.0, 0.04395632743835449, -0.16005377769470214, 0.0, -0.21400566101074217, + -0.5520479679107666, 0.0, 0.0, 0.0, 0.0, 0.42197341918945314, 0.4718821048736572, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.12197670936584472, 0.0, 0.0, 0.0, -0.3200390338897705, + 0.015923309326171874, 0.5639647006988525, 0.0, 0.0, -0.5220086574554443, 0.0, 0.0, + 0.5600202083587646, 0.0, 0.0, 0.0, 0.0, -0.0540311336517334, 0.0, 0.33398880958557126, + 0.0, -0.10002150535583496, 0.0, 0.0, 0.0, 0.0, -0.012018871307373046, 0.0, -0.11196870803833008, + 0.0, 0.3439507484436035, 0.0, 0.009989118576049805, 0.0, 0.0, 0.32199954986572266, + 0.0, 0.49400782585144043, 0.0, 0.0, 0.0, 0.0, 0.0, 0.36800379753112794, 0.04996190071105957, + 0.09997286796569824, 0.0, 0.0, -0.07805328369140625, -0.14004693031311036, 0.0, + 0.0, 0.4620270252227783, -0.5379946231842041, 0.0, 0.0, 0.0, 
-0.21804699897766114, + -0.4640506267547607, 0.0, 0.0, 0.0, -0.17363462448120118, 0.0, -0.17801513671875, + 0.0, 0.0, -0.20001182556152344, 0.4380728244781494, 0.0, -0.010062646865844726, + 0.0, -0.2760794162750244, 0.5840030193328857, 0.0, 0.0, -0.26999788284301757, 0.0, + -0.3319832324981689, 0.0, 0.0, -0.5420073509216309, 0.0, 0.0, 0.24796385765075682, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.500007677078247, 0.41399717330932617, 0.49600696563720703, + 0.0, 0.0, 0.147908878326416, 0.0, 0.21600809097290039, 0.0, 0.0, 0.0, 0.0, 0.15397052764892577, + 0.5919826984405517, 0.0, 0.0, -0.2880386352539062, -0.168019437789917, 0.0, 0.0, + 0.03596763610839843, 0.02396421432495117, 0.0, 0.0, -0.576038932800293, 0.0, -0.4800680637359619, + 0.0, 0.0, -0.372011661529541, 0.0, 0.0, 0.0, 0.0, -0.23602509498596191, 0.0, 0.0, + 0.47597694396972656, 0.0, 0.0, 0.0, 0.0, -0.1060631275177002, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.18603844642639158, 0.446024751663208, -0.26800031661987306, 0.0, -0.09800477027893066, + 0.0, 0.0, 0.0, -0.39800405502319336, -0.31803460121154786, 0.0, 0.0, -0.19002342224121094, + 0.0, -0.04997878074645996, 0.0, 0.09594383239746093, 0.23801350593566895, 0.0, 0.0, + 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].band: 0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].bandCenterMHz: 4250.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[0].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, 
-72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 
63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].CryoChannels.amplitudeScaleArray: '[0, + 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, 12, 12, 12, 0, 0, 0, 0, 12, 12, 12, 12, 0, 12, + 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, + 0, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, + 12, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 0, + 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, + 0, 12, 0, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 0, 12, + 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, 12, + 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 12, 12, + 12, 0, 
12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, + 0, 0, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, + 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 12, 0, 0, 12, 12, 12, 0, + 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 0, 12, 12, 0, 0, 0, + 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, + 0, 0, 0, 12, 0, 12, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, + 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 12, 12, 0, 0, 0, 12, 0, 12, 0, + 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, + 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, + 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, + 0, 0, 0, 12, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, + 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 12, 12, 0, 0, 0, 0, + 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].CryoChannels.centerFrequencyArray: '[0.0, + 0.0, -0.5680173397064209, 0.0, 0.3439916610717773, 0.0, 0.0, 0.024023437499999998, + 0.0, 0.0, 0.4319901466369629, -0.03597307205200195, -0.5280049324035644, 0.4279785633087158, + 0.0, 0.0, 0.0, 0.0, -0.2520134925842285, -0.480023717880249, 0.4779835224151611, + 0.20597448348999023, 0.0, -0.47004132270812987, 0.0, 0.0, 0.0, -0.29998369216918946, + -0.06199994087219238, -0.008020162582397461, 0.0, 0.0, 0.0, 0.0, 0.008034467697143555, + 0.0, 0.0, -0.3899667263031006, 0.0, 0.0, 0.0, 0.0, 0.0, 0.20198163986206055, 0.16400084495544434, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.2720656871795654, 0.0, 0.013976812362670898, 0.0, + 0.0, 0.0, -0.46604347229003906, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + -0.20598721504211426, 0.0, 0.146024751663208, -0.14794621467590333, 0.0, 0.0, -0.4140020370483398, + 0.42600317001342775, 
-0.24202237129211424, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5400391101837158, + -0.1640228748321533, 0.45404419898986814, 0.0, 0.0, 0.0, -0.3119942665100098, 0.0, + -0.05601367950439453, 0.0, -0.5720319271087646, 0.0, 0.0, 0.0, 0.0, 0.26999716758728026, + -0.5060262680053711, 0.0, 0.0, 0.0, -0.5359906196594239, 0.0, 0.0, 0.0, 6.29425048828125e-06, + -0.500011682510376, 0.015981245040893554, 0.0, 0.0, 0.0, 0.0, 0.4860013961791992, + 0.0, 0.0, 0.2059943675994873, 0.0, 0.0, 0.0, 0.0, 0.0, -0.2880077362060547, 0.0, + 0.46595191955566406, 0.0, 0.0, 0.0, 0.0, 0.2160264015197754, 0.0, 0.0, -0.04209780693054199, + 0.0, 0.0, 0.0, 0.0, 0.4299784183502197, 0.0, 0.42198715209960935, -0.25802865028381344, + 0.0, 0.0, 0.0, 0.0, -0.5440139293670654, 0.0, 0.0, -0.2780111789703369, 0.0, 0.0, + 0.0, -0.017959070205688477, 0.0, 0.0, 0.0, 0.3959930419921875, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.05401554107666016, -0.24004783630371093, 0.0, 0.0, -0.11802334785461426, + 0.0, 0.0, 0.11600332260131835, 0.47996678352355954, 0.008001708984375, 0.0, 0.0, + 0.0, 0.0, 0.0, -0.19004230499267577, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.4560220241546631, + 0.0, 0.0, 0.0, 0.04193072319030761, 0.0, 0.0, 0.0, 0.0, -0.031989240646362306, -0.0020104408264160155, + 0.11800146102905273, 0.15597839355468748, 0.0, 0.5299216747283936, 0.0, 0.0, 0.5759978771209716, + -0.4060312271118164, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06800122261047363, + 0.0, 0.0, 0.0, 0.0, -0.22803082466125488, 0.0, -0.41203823089599606, 0.0, 0.5099930763244629, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.361995792388916, -0.20801310539245604, -0.2499913215637207, + 0.0, 0.23804740905761718, 0.0, 0.0, 0.0, 0.5420192241668701, -0.14999985694885254, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.012001132965087891, -0.5119967937469482, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.3200049877166748, -0.37805643081665036, 0.0, 0.0, 0.0, + 0.0, -0.30600929260253906, 0.0, 0.0, -0.4100303649902344, 0.0, 0.18996062278747558, + 0.0, 0.0, 0.44799442291259767, 0.3479487419128418, 
0.1520071506500244, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, -0.2360372543334961, 0.4719832420349121, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.17599811553955078, 0.0, 0.15998625755310059, 0.0, 0.0, 0.0, 0.0, 0.44396352767944336, + -0.37398018836975094, 0.0, 0.0, 0.0, -0.2779808521270752, 0.0, 0.0, -0.11199874877929687, + 0.28995337486267087, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.12202019691467285, 0.10001721382141113, + 0.0, 0.3739793300628662, 0.0, 0.0, 0.0, -0.3360281467437744, 0.0, 0.0, 0.0, -0.31400327682495116, + 0.0, 0.0, 0.0, 0.0, 0.007999563217163086, 0.0, -0.4660128593444824, -0.1900242805480957, + 0.0, 0.17596492767333985, 0.0, 0.0, -0.2620121955871582, 0.0, 0.24400792121887205, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32379984855651855, 0.04999351501464844, 0.47795505523681636, + 0.0, 0.0, 0.0, -0.4959993839263916, 0.0, 0.0, 0.0, 0.5119736194610596, 0.0, 0.0, + 0.0, 0.0, 0.4739863872528076, 0.0, 0.15400142669677733, 0.0, 0.28398427963256834, + -0.15799198150634766, 0.0, 0.0, 0.0, 0.1939826488494873, 0.0, 0.32202773094177245, + 0.0, 0.0, 0.0, 0.0, 0.1899949550628662, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.3440397262573242, + 0.0, 0.10799531936645508, 0.31598510742187497, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4899923801422119, + 0.0, 0.0, 0.0, 0.0, -0.5900450706481933, 0.0, 0.0, 0.0, -0.045949888229370114, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5200498580932617, -0.4699991226196289, 0.351961612701416, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.5900452136993408, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06399393081665039, + 0.0, 0.2119877815246582, 0.0, 0.10401005744934082, 0.0, 0.0, 0.0, 0.48800067901611327, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1479718208312988, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.417969274520874, 0.0, 0.0, 0.0, 0.11392393112182617, 0.0, 0.0, 0.0, 0.0, + 0.1140322208404541, 0.42998156547546384, 0.5819997310638427, 0.0, 0.0, 0.1799695014953613, + 0.0, 0.0, 0.0, -0.06996746063232422, -0.11796426773071289, 0.1918776512145996, 0.0, + 0.0, 0.0, 0.0, 0.0, -0.5740214824676514, 0.0, 0.08398089408874511, 0.0, 0.0, 
0.0, + 0.0, 0.0, 0.0, 0.0, 0.4199828624725342, 0.0, 0.0, 0.0, 0.0, -0.22801251411437987, + 0.0, -0.3579998016357422, 0.27796096801757814, -0.5500136375427246, 0.0, 0.0, 0.0, + 0.0, 0.0, -0.21200551986694335, -0.1779994010925293, 0.0, 0.0, 0.0, 0.0, 0.1479935646057129, + 0.0, -0.3500258445739746, 0.06798934936523438, 0.0, 0.0, 0.0, 0.16998596191406248, + 0.0, -0.22204298973083494, 0.0, -0.48409366607666016, 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].band: 1 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].bandCenterMHz: 4750.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[1].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 
31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, 
-298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].CryoChannels.amplitudeScaleArray: '[12, + 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 12, 0, 12, + 0, 12, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, + 0, 0, 12, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, + 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, + 12, 12, 12, 0, 12, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 12, 0, + 12, 0, 0, 0, 0, 12, 0, 0, 0, 12, 12, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, + 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, + 0, 12, 0, 12, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, + 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, + 0, 12, 12, 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, 12, 12, + 0, 12, 0, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 12, 0, 0, 0, 12, 12, 12, + 0, 12, 12, 0, 0, 12, 0, 12, 0, 12, 12, 12, 0, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, + 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, + 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, 12, 12, + 0, 12, 12, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 12, + 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, + 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 12, 0, 12, 0, 0, 12, 12, 0, 0, 12, + 12, 0, 12, 0, 0, 0, 
12, 0, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 0, + 12, 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 12, 0, 0, 0, 0, + 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, + 0, 12, 12, 0, 0, 12, 0, 12, 0, 12, 12, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 12, + 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].CryoChannels.centerFrequencyArray: '[-0.47391285896301266, + 0.0, 0.0, 0.0, 0.0, 0.3880022048950195, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.29800944328308104, + 0.0, 0.0, 0.0, 0.24200878143310545, 0.0, 0.0, -0.5400040626525878, 0.2640091896057129, + 0.0, 0.0, -0.047991800308227535, 0.0, -0.3840036392211914, 0.0, -0.3380092620849609, + -0.3159905433654785, 0.0, 0.0, 0.24596457481384276, 0.0, 0.0, 0.0, 0.0, 0.05397319793701172, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5220010757446288, 0.1339841365814209, -0.1799772262573242, + 0.0, -0.0020107269287109376, 0.0, 0.0, 0.0, 0.0, 0.31197595596313477, 0.3539772033691406, + 0.0, 0.0, -0.5719689846038818, 0.0, 0.0, 0.0, 0.0, -0.5139874935150146, 0.0, -0.0380375862121582, + -0.2660196304321289, 0.0, 0.0, 0.0, -0.39402923583984373, 0.0, 0.0, 0.3659813404083252, + -0.04802041053771972, 0.0, 0.0, 0.0, 0.5539954662322998, 0.0, 0.0, -0.3460132598876953, + 0.0, 0.0, 0.0, 0.0, -0.58400559425354, 0.0, 0.0, 0.09399890899658203, 0.0, 0.0, + 0.0, -0.20998678207397461, 0.0, 0.017925882339477537, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3440083980560303, + -0.542021369934082, -0.27200875282287595, 0.0, -0.4599931240081787, -0.12004237174987792, + 0.0, 0.0, 0.0, 0.0, 0.59400315284729, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3419802188873291, + 0.0, -0.3780778884887695, -0.4720104217529297, 0.23801379203796386, 0.0, -0.1600111484527588, + 0.0, 0.0, 0.0, 0.0, 0.20199208259582518, 0.0, 0.0, 0.0, 0.1839634895324707, -0.4080214977264404, + 0.0, 0.34400153160095215, -0.5899962902069091, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4100021839141846, + 0.0, 0.0, 0.0, 0.10999159812927246, -0.060000801086425776, 0.0, 0.0, 
0.349989652633667, + 0.0, 0.0, 0.0, 0.5259823322296142, 0.0, 0.0, 0.0, -0.4560235977172851, 0.0, 0.0, + 0.0, 0.0, 0.13199987411499023, 0.4480125904083252, 0.0, 0.0, 0.5220035076141357, + 0.0, 0.0, 0.1759406089782715, 0.0, -0.42999544143676754, 0.0, 0.26997900009155273, + 0.09199376106262207, 0.0, 0.0, 0.0, 0.42599029541015626, -0.4160299301147461, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.39598803520202636, 0.0, 0.0, 0.0, -0.38401193618774415, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.555950403213501, 0.0, 0.0, 0.0, 0.20401954650878906, + 0.31994991302490233, 0.4919689178466797, 0.0, 0.0, 0.4580204486846924, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.11200175285339355, 0.0, 0.0, 0.0, 0.0, 0.135975980758667, + 0.0, 0.060008096694946285, 0.5239883422851562, 0.0, 0.0, -0.3139935493469238, 0.0, + -0.5939921379089356, 0.0, 0.0, -0.5440034866333008, 0.0, 0.0, 0.0, 0.5460418224334717, + 0.4000092029571533, 0.0, 0.0, 0.5319414138793945, 0.0, 0.0, 0.5399788856506348, + 0.29199686050415036, -0.19600768089294432, 0.0, -0.44600672721862794, 0.0, -0.2180009365081787, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.24196457862854004, 0.27797427177429196, 0.0, 0.0, 0.19599194526672362, + -0.4120036125183105, 0.0, 0.0, -0.34800968170166013, 0.0, 0.0, 0.0, -0.25199432373046876, + 0.012019014358520508, 0.15396294593811036, 0.0, 0.22798318862915037, 0.5239952087402343, + 0.0, 0.0, -0.21200165748596192, 0.0, 0.08799633979797362, 0.0, -0.1680124282836914, + -0.1020421028137207, 0.49198007583618164, 0.0, -0.10999174118041992, 0.2859798431396484, + 0.495999813079834, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.2819977283477783, 0.0, + 0.30197768211364745, -0.04597663879394531, 0.0, 0.0, 0.0, 0.17199039459228516, 0.0, + 0.0, 0.0, 0.33399024009704587, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.05999279022216797, + -0.07602367401123046, 0.11403894424438477, 0.0, -0.1060378074645996, 0.0, 0.0, 0.0, + 0.0, 0.455974531173706, 0.0, 0.0, 0.0, -0.39802980422973633, -0.254036808013916, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.41000661849975584, 0.0, 
0.004012298583984375, 0.0, 0.0, + 0.06596946716308594, 0.0, 0.0, -0.23801422119140625, -0.288026762008667, 0.07198762893676758, + 0.0, 0.47997307777404785, -0.006014013290405273, -0.5840053081512451, 0.0, 0.4359548091888428, + -0.5500173568725586, 0.0, 0.0, -0.22600822448730468, -0.3960076332092285, 0.0, 0.0, + 0.0, 0.15000901222229004, 0.0, 0.0, 0.36599206924438477, 0.0, 0.0, 0.0, -0.4259897232055664, + -0.16202030181884766, 0.0, 0.0, -0.0040132999420166016, -0.35199408531188964, 0.0, + 0.0, 0.0, -0.2800652503967285, 0.0, 0.0, 0.0, 0.20396890640258789, 0.0, 0.0, -0.012024450302124023, + 0.0, 0.006003856658935547, 0.0, -0.2540217876434326, 0.0, 0.0, 0.0, -0.2860960006713867, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.17402930259704588, -0.11399259567260742, -0.33200712203979493, + 0.0, 0.0, -0.22604484558105467, 0.0, 0.0, 0.3760158061981201, 0.0, -0.2820111751556396, + 0.0, 0.0, 0.3739851951599121, 0.18200139999389647, 0.0, 0.0, -0.2380223751068115, + -0.026019573211669922, 0.0, 0.22802796363830566, 0.0, 0.0, 0.0, -0.5120018005371093, + 0.0, 0.0, 0.0, -0.21199278831481932, 0.0, 0.0, 0.0, 0.28998799324035646, 0.5800008773803711, + 0.18800439834594726, 0.0, 0.0, 0.1979912281036377, 0.0, 0.0, 0.0, 0.0, 0.21999664306640623, + 0.0, -0.5579509735107422, -0.19598894119262694, 0.0, 0.0, 0.0, 0.0, -0.529987621307373, + 0.0, 0.05997576713562011, 0.0, 0.0, 0.0, 0.5560177803039551, -0.3260039806365967, + -0.12200274467468261, 0.0, -0.09403824806213379, 0.5559582710266113, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, -0.5140048027038574, 0.0, 0.0, 0.0, -0.5739872932434081, -0.12400760650634765, + -0.42800302505493165, 0.0, 0.13596625328063963, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31998724937438966, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.038021278381347653, 0.0, 0.0, 9.1552734375e-06, + 0.3299941062927246, 0.0, 0.0, -0.4759928226470947, 0.0, -0.03600783348083496, 0.0, + -0.5240106582641602, 0.4559628009796142, 0.0, 0.0, 0.13398141860961912, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.20799880027770995, 0.0, 
0.11200346946716308, 0.0, 0.15400500297546385, + 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].band: 2 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].bandCenterMHz: 5250.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[2].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + 
-290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 
190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].CryoChannels.amplitudeScaleArray: '[0, + 0, 12, 0, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 0, 12, + 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0, 12, 12, 0, 0, 12, 12, 12, 0, 0, 12, + 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 12, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, + 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, + 12, 12, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, + 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 12, 0, 12, 0, 12, 0, 0, 12, 12, 0, 0, 0, + 12, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 12, 12, + 12, 0, 0, 0, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, + 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 0, 12, + 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 0, 12, 0, + 0, 0, 12, 0, 12, 12, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 0, 0, + 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, + 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, + 0, 0, 0, 12, 12, 12, 0, 12, 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, + 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 0, 12, 12, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, + 0, 0, 12, 12, 12, 12, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, + 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, + 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 12, + 0, 12, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, + 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, 0, 12, 0, 12, 12, 0, 0, 12, 12, 12, 0, 0]' 
+AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].CryoChannels.centerFrequencyArray: '[0.0, + 0.0, 0.45199084281921387, 0.0, 0.40000247955322266, 0.0, 0.0, 0.0, 0.0, 0.0, -0.27199487686157225, + 0.16999754905700684, -0.28002004623413085, 0.0, 0.0, 0.0, -0.5040172576904297, 0.0, + -0.01400456428527832, 0.19400281906127928, 0.0, 0.0, 0.0, 0.0, -0.11801204681396484, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38599491119384766, 0.0, -0.17198452949523926, + 0.0, 0.25398602485656735, 0.0, 0.0, -0.4600055694580078, -0.4459925651550293, 0.0, + 0.0, -0.06599464416503906, -0.5039926528930664, -0.2240053653717041, 0.0, 0.0, -0.05199322700500488, + 0.0, 0.19999680519104004, 0.0, 0.3920016288757324, 0.0, 0.0, 0.0, 0.3159945487976074, + 0.0, 0.0, -0.061996364593505854, -0.09601807594299316, -0.010013151168823241, 0.0, + -0.199982213973999, 0.0, 0.0, 0.0, 0.13798985481262208, -0.48199625015258785, 0.0, + 0.0, -0.02601513862609863, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.2459999084472656, 0.0, 0.0, 0.0, -0.4320098876953125, 0.0, 0.0, 0.0, 0.0, 0.18798952102661132, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.6000240325927734, 0.13198800086975096, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.4060186386108398, 0.573986005783081, 0.0, 0.0, 0.0, 0.22598876953125, + 0.0, 0.0, -0.4860044002532959, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.452012300491333, + 0.18400096893310547, 0.29398627281188966, 0.0, 0.0, 0.0, 0.0, -0.11800775527954101, + 0.0, -0.29599857330322266, 0.0, 0.0, 0.0, -0.1160038948059082, 0.0, 0.0, -0.27600445747375485, + 0.0, 0.0, 0.0, -0.14401016235351563, 0.3179966926574707, 0.0, 0.0, 0.0, 0.29599442481994626, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5679888725280762, 0.0, 0.0, 0.0, 0.1520001411437988, + -0.6079997062683106, 0.0, -0.18401570320129393, 0.0, -0.39400348663330076, 0.0, + 0.0, -0.43799328804016113, 0.5839954376220703, 0.0, 0.0, 0.0, -0.10400204658508301, + -0.4479941368103027, 0.0, -0.5740253448486328, 0.0, 0.0, 0.0, 0.03999652862548828, + -0.38601064682006836, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, -0.41998758316040036, 0.0, + 0.0, -0.21201868057250975, 0.0, 0.0, 0.5539825916290283, -0.24401822090148925, 0.3459949493408203, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.33400583267211914, 0.0, 0.14598612785339354, 0.0, + -0.1919762134552002, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.43400959968566893, + 0.0, 0.0, 0.38999176025390625, 0.0, -0.5840168952941894, 0.0, -0.4700052738189697, + 0.0, 0.0, -0.5460067749023437, 0.0, -0.5560033321380615, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.27400388717651364, 0.0, 0.0, 0.0, -0.012006282806396484, 0.0, 0.0, -0.1779994010925293, + 0.0, -0.12200732231140136, 0.0, 0.0, 0.0, 0.04799680709838867, 0.0, 0.0, -0.37199864387512205, + 0.0, 0.0, 0.0, 0.22600779533386228, 0.0, 0.0, 0.0, 0.26400117874145507, 0.0, 0.0, + 0.0, -0.316001558303833, 0.0, 0.0, 0.4779948234558105, 0.0, -0.3420382976531982, + 0.0, 0.0, 0.0, -0.3740083694458008, 0.0, 0.4999943733215332, 0.26999173164367674, + -0.5480036258697509, 0.0, 0.0, 0.0, 0.32599453926086425, 0.0, 0.0, 0.0, 0.0, -0.5599987506866455, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.5240066528320312, 0.0, 0.0, 0.0, 0.0, -0.19799351692199707, + 0.0, 0.0, 0.1360011577606201, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5999882698059081, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5159926414489746, 0.0, 0.5499817371368408, + 0.0, 0.0, 0.0, 0.0, 0.32198781967163087, 0.0, -0.579990291595459, 0.0, 0.0, -0.10799331665039062, + 0.0, 0.0, 0.0, -0.20399565696716307, 0.2519984722137451, -0.19800281524658203, 0.0, + -0.3979983329772949, 0.0, 0.0, 0.44599242210388185, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, -0.39601478576660154, 0.41599245071411134, -0.5219992160797119, 0.0, -0.5500247955322265, + -0.534019947052002, 0.0, 0.0, 0.16595234870910644, 0.0, 0.0, 0.0, 0.5580044746398926, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.05201153755187988, 0.0, -0.5299834728240966, 0.0, 0.0, + -0.500011396408081, 0.29599699974060056, -0.5299871921539306, 0.0, 0.0, -0.5100080966949463, + 0.0, 0.0, 0.0, 0.0, 0.21798162460327147, 0.38797130584716794, 
0.0, 0.5499960422515869, + 0.0, 0.0, -0.0020102977752685545, -0.2600104808807373, 0.0, 0.0, 0.0, -0.380005931854248, + 0.0, 0.0, -0.30602688789367677, 0.37200307846069336, -0.07999591827392578, -0.1079988956451416, + 0.0, 0.0, 0.0, 0.0, -0.27200818061828613, 0.0, 0.4879767894744873, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, -0.2739983081817627, -0.28400588035583496, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.36600122451782224, -0.34400095939636227, 0.0, 0.0, 0.08599247932434081, + 0.0, 0.0, 0.0, 0.3459872245788574, 0.45198268890380855, 0.3639997959136963, 0.0, + 0.1280104637145996, 0.0, 0.0, 0.08999547958374023, 0.0, 0.10399403572082519, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.09000062942504883, 0.0, 0.0, 0.0, 0.0, 0.1099928855895996, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.3440056800842285, 0.0, 0.0, 0.0, 0.0, 0.0939906120300293, + 0.0, 0.0, 0.0, -0.3920082092285156, 0.0, -0.3740100860595703, 0.0, -0.39600219726562497, + 0.0, 0.0, -0.5020069599151611, -0.07799835205078125, 0.0, 0.0, 0.0, -0.15601372718811035, + -0.10201821327209472, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11603665351867676, 0.20201382637023926, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.4679892539978027, 0.0, -0.04999322891235351, 0.0, + 0.0, -0.23200421333312987, 0.0, 0.0, -0.3859999179840088, 0.0, 0.11600117683410643, + 0.0, -0.30002102851867674, -0.12800045013427733, 0.0, 0.0, 0.5439904689788818, 0.023996973037719724, + 0.34200096130371094, 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].band: 3 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].bandCenterMHz: 5750.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[3].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, 
-105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 
145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].CryoChannels.amplitudeScaleArray: '[12, + 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 12, 0, + 0, 0, 0, 12, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 0, 0, + 0, 12, 12, 12, 0, 12, 12, 12, 0, 12, 12, 12, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 12, + 0, 12, 12, 0, 0, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 12, 12, 0, 12, 12, 12, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 12, 0, 12, 0, + 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 12, 0, 12, 0, 0, 
0, 0, 0, 12, 12, 0, 0, 12, 0, + 0, 12, 0, 0, 12, 0, 0, 12, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 12, 12, 12, 0, 12, + 12, 0, 12, 12, 12, 12, 0, 12, 0, 0, 12, 12, 12, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, + 0, 12, 0, 12, 12, 12, 12, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 0, 0, 12, + 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, + 0, 12, 12, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 12, 12, 12, 12, + 0, 12, 12, 0, 12, 12, 0, 12, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 12, 0, 0, 12, 12, + 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, + 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 12, 12, 0, + 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 12, 12, + 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 12, 12, 12, 12, 0, 0, 0, 12, 0, 12, 12, + 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, 12, 0, 12, + 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 12, 0, 12, 12, 12, 12, 0, 0, 0, 0, 0, 12, 12, + 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 12, 12, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, + 12, 0, 0, 12, 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 12, 12, 12, 0, 0, 0, 12, 0, 12, + 12, 0, 0, 12, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, 12, 0, 12, 12, + 0, 12, 0, 12, 0, 12, 0, 12, 0, 0, 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].CryoChannels.centerFrequencyArray: '[-0.37003026008605955, + 0.0, 0.0, 0.0, -0.5460069179534912, 0.0, 0.0, 0.24801650047302246, 0.0, 0.0, 0.0, + 0.0, -0.08401479721069335, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.2620513916015625, 0.10598516464233398, + 0.0, 0.0, 0.0, -0.3020007133483887, 0.0, 0.0, 0.0, 0.0, -0.20604300498962402, 0.0, + -0.19206690788269043, 0.0, 0.0, -0.37399678230285643, -0.10022478103637696, 0.0, + 0.0, 0.0, 0.10198273658752441, 0.0, 0.0, 0.0, 0.03395290374755859, -0.6059906959533691, + -0.5845261573791504, 0.0, -0.19005775451660156, 0.0, 0.0, 0.0, 0.0377955436706543, + 0.3907572269439697, 
0.5859074592590332, 0.0, -0.018062353134155273, -0.5540071964263916, + 0.5299462795257568, 0.0, 0.06196446418762207, 0.11202263832092285, 0.029963636398315428, + 0.0, 0.0, 0.0, 0.0, 0.21597590446472167, -0.5282551288604737, 0.17399754524230956, + 0.0, 0.0, -0.39197573661804197, -0.27398815155029294, 0.0, 0.469989538192749, -0.14203090667724608, + 0.0, 0.0, 0.0, 0.15185022354125977, 0.11398901939392089, 0.0, 0.0, 0.0, 0.4900001049041748, + 0.3859980583190918, 0.0, 0.46194519996643063, -0.07200279235839843, -0.1220698356628418, + 0.0, -0.05593414306640625, -0.43801589012145997, 0.31198983192443847, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3259554862976074, 0.0560145378112793, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4980006694793701, 0.0, 0.0, -0.4599899768829345, + 0.0, -0.060038852691650386, 0.0, -0.0024048328399658204, 0.0, 0.20794973373413086, + 0.0, -0.3320607662200928, -0.3260061264038086, 0.0, 0.0, -0.19602527618408203, -0.5580372333526611, + 0.0, 0.0, -0.10399904251098632, 0.0, 0.5718242168426514, 0.0, 0.0, 0.0, 0.0, 0.0, + -0.10404138565063477, -0.5199883460998534, 0.0, 0.0, 0.4579771041870117, 0.0, 0.0, + -0.34999451637268064, 0.0, 0.0, 0.13387455940246581, 0.0, 0.0, 0.19799723625183105, + -0.009981822967529296, 0.0, 0.49191370010375973, -0.0899754524230957, 0.0, 0.0, + -0.3939967632293701, 0.5199950695037842, 0.0, 0.0, -0.3840724468231201, -0.5880030155181885, + -0.5303831577301025, 0.0, 0.037985944747924806, 0.48200154304504395, 0.0, 0.22797703742980957, + 0.3580026626586914, -0.2539967536926269, -0.03433871269226074, 0.0, -0.2541176319122314, + 0.0, 0.0, 0.3079991340637207, -0.170211124420166, 0.29390416145324705, 0.10800991058349609, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.016089248657226562, 0.2079960823059082, -0.43599572181701657, + 0.0, 0.0, -0.36199522018432617, 0.0, 0.0419611930847168, -0.2521174907684326, 0.26601362228393555, + -0.5043087959289551, 0.0, -0.17400240898132324, -0.5000155448913574, 0.0, 0.0, 0.0, + 0.4900052547454834, 0.0, 0.0, 
0.08592424392700194, 0.5880061626434326, 0.0, 0.0, + 0.0, 0.0, 0.3861229419708252, 0.0, 0.3519281387329101, -0.22200150489807127, 0.0, + 0.0, 0.0, 0.0, 0.5160205364227295, 0.0, -0.19994730949401854, 0.3999986171722412, + 0.0, 0.0, 0.4962451457977295, -0.011997413635253905, 0.0, 0.0, 0.0, -0.40799603462219236, + 0.0, 0.0, 0.0, 0.0, 0.248016357421875, 0.0, -0.06399493217468262, -0.04201011657714843, + 0.0, 0.29200072288513185, 0.0, -0.06400995254516602, 0.0, 0.0, -0.609913444519043, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.5158862113952637, 0.0, -0.3160041332244873, 0.0, 0.0, + -0.39200592041015625, 0.2182514190673828, -0.010010576248168946, -0.2561778545379639, + 0.0, -0.03798937797546387, 0.4799885272979736, 0.0, -0.011982536315917969, -0.5280790328979492, + 0.0, -0.3919475555419922, 0.0, -0.45401573181152344, 0.5559692859649658, 0.0, 0.0, + 0.0, 0.4900016784667969, -0.19806289672851562, 0.0, -0.5640991687774658, 0.5280022144317627, + 0.0, 0.0, -0.25624594688415525, -0.5580443859100341, 0.38989734649658203, 0.0, 0.0, + 0.0, 0.0, 0.4659544944763183, 0.0, 0.0, 0.0, 0.0, 0.0, -0.024003124237060545, 0.0, + -0.5400047779083251, 0.0, 0.0, 0.0, 0.0, 0.0, -0.15399470329284667, 0.0, -0.3920097827911377, + 0.0, 0.0, 0.0, 0.0, 0.33597235679626464, 0.21799077987670898, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, -0.15601401329040526, 0.0699920654296875, 0.0, 0.0, 0.0, 0.0, -0.566447639465332, + 0.0, 0.46197781562805174, 0.0, 0.0, 0.0, 0.4819968223571777, -0.08199806213378906, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.32396750450134276, -0.03831353187561035, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.2819329261779785, 0.0, 0.5978599548339844, 0.0, 0.0, 0.0, -0.5360343933105468, + 0.0, 0.0, 0.37798233032226564, -0.2203052043914795, -0.6080065727233886, 0.24001150131225585, + 0.0, 0.17596650123596191, 0.0, 0.0, 0.0, -0.45001358985900874, -0.31005077362060546, + -0.5208340644836426, 0.0, 0.0, 0.0, 0.0, -0.5740010261535644, 0.3879990577697754, + -0.27599129676818845, 0.6019266128540038, 0.0, 0.0, 0.0, 0.5161484241485595, 0.0, + 
0.34401140213012693, 0.5939699649810791, -0.14679551124572754, 0.0, 0.0, 0.3639650344848633, + 0.0, 0.1500171661376953, 0.0, -0.5640079021453858, 0.0, 0.0, 0.0, 0.049987506866455075, + 0.0, -0.373994779586792, 0.0, 0.34398636817932127, 0.0, 0.0, 0.0, -0.5680023193359375, + 0.0, 0.5879705429077148, 0.0, 0.4159993171691894, 0.0, 0.0, -0.39001736640930174, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.421983003616333, 0.0, 0.49813957214355464, 0.31798653602600097, + 0.0, 0.35599050521850584, 0.18602113723754882, 0.10000433921813964, 0.5299282550811767, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.4839963912963867, 0.38399019241333004, 0.0, 0.0, -0.3620279788970947, + -0.3560040950775146, 0.0, 0.0, 0.0, -0.044008827209472655, 0.0, 0.0, -0.29611430168151853, + -0.14199943542480467, 0.3498251438140869, 0.0, 0.0, 0.0, 0.27584052085876465, 0.0, + -0.20401153564453126, -0.5300954818725586, 0.0, 0.0, 0.0, 0.3519566059112549, 0.4358652591705322, + 0.0, 0.0, 0.2719965934753418, 0.0, -0.17998409271240234, -0.4660330295562744, 0.0, + 0.0, 0.0, 0.0, 0.44999971389770504, 0.0, -0.37399706840515134, -0.3900907516479492, + -0.4200114727020264, -0.016069793701171873, 0.0, 0.0, 0.0, 0.11994681358337402, + 0.0, 0.18192486763000487, -0.4580078601837158, 0.0, 0.0, 0.23791608810424802, 0.42797040939331055, + 0.0, 0.05997061729431152, 0.0, 0.0, -0.26794681549072263, 0.0, -0.1320582389831543, + 0.0, 0.0, 0.0, -0.3060699462890625, -0.5880231857299805, 0.0, 0.0, 0.0, -0.3860149383544922, + 0.0, 0.09599332809448242, -0.37605872154235837, 0.0, -0.058088064193725586, 0.0, + 0.3959977626800537, 0.0, -0.28410930633544923, 0.0, 0.001885986328125, 0.0, 0.0, + 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].band: 4 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].bandCenterMHz: 6250.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[4].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, 
-115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 
135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].CryoChannels.amplitudeScaleArray: '[12, + 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 12, 12, 0, 12, 0, + 12, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 12, 12, 12, 0, 0, 12, 0, + 0, 12, 12, 12, 12, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, 0, 
0, 0, 12, 0, + 0, 0, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 12, 0, 12, 0, 12, 0, + 0, 12, 12, 0, 12, 12, 12, 12, 0, 12, 12, 0, 12, 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, + 0, 12, 0, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 12, + 0, 0, 12, 0, 12, 0, 12, 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 12, 12, 12, 12, 0, 0, 12, + 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 12, 0, 12, 0, 0, 0, 12, 12, + 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 12, 0, 0, 12, 0, 12, 12, 12, 12, 0, 0, 12, + 12, 0, 12, 12, 12, 0, 0, 0, 0, 12, 12, 12, 12, 12, 0, 12, 0, 0, 0, 0, 0, 0, 12, + 12, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 12, 12, 0, 12, + 12, 0, 12, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 0, 0, 12, 12, 12, 0, 12, 0, 12, 0, 12, + 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, + 0, 0, 12, 0, 12, 0, 0, 0, 0, 12, 12, 12, 12, 12, 0, 12, 0, 12, 12, 0, 0, 0, 12, + 0, 0, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, + 0, 12, 12, 12, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, + 0, 0, 12, 12, 12, 12, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 0, 0, 12, 12, 12, 0, 0, + 12, 0, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, 0, 0, + 12, 12, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 0, 12, 12, 0, 12, 12, 0, 12, 12, 12, 12, + 0, 12, 0, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 0, 0, 12, + 0, 12, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 12, 12, 12, 12, 0, 0, 0, 0, 12, 0, 0, 12, + 0, 12, 12, 12, 0, 12, 12, 12, 0, 12]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].CryoChannels.centerFrequencyArray: '[0.521991491317749, + 0.0, 0.0, -0.41803607940673826, 0.0, 0.0, 0.0, 0.0, -0.28397369384765625, 0.0, 0.0, + -0.10602364540100097, 0.0, 0.0, 0.0, 0.15203118324279785, 0.0, 0.0, -0.2900050163269043, + -0.3600434303283691, -0.5560396671295166, 0.2820293426513672, 0.0, 0.2758913040161133, + 0.0, -0.3740501403808594, 0.0, 0.0, -0.17600769996643065, 0.0, 
0.0, -0.6078886985778809, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.3659996509552002, 0.0, 0.0, -0.540102195739746, 0.0, + -0.11198701858520507, 0.4559313297271728, 0.363975191116333, 0.0, 0.0, 0.20588951110839843, + 0.0, 0.0, -0.24803667068481444, -0.004010295867919921, -0.001996278762817383, -0.5520688533782959, + 0.0, 0.04392142295837402, 0.0, 0.07799491882324218, 0.0, -0.37399864196777344, 0.0, + 0.0, 0.0, -0.314044189453125, 0.3338913917541504, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.18992557525634765, 0.0, 0.0, 0.0, -0.4480238914489746, 0.5320087909698487, 0.11197414398193359, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.18206262588500977, 0.0, 0.0, 0.47003989219665526, + 0.0, 0.0, 0.4799474716186523, 0.0, -0.13598928451538086, 0.0, 0.32001585960388185, + 0.0, 0.0, -0.0180234432220459, 0.2739488124847412, 0.0, -0.009784841537475586, 0.328018856048584, + 0.5619104862213135, -0.15397567749023436, 0.0, -0.1820129871368408, -0.4320108890533447, + 0.0, -0.5740633964538574, 0.0, -0.04000725746154785, 0.357709264755249, 0.0, 0.4980093955993652, + 0.0, 0.0, 0.0, 0.0, 0.14797711372375488, 0.0, -0.5460730075836181, 0.0, -0.010097551345825195, + 0.0, -0.3060258865356445, 0.0, 0.0, 0.43589115142822266, 0.0, 0.13004593849182128, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04399294853210449, -0.01800827980041504, 0.0, + 0.0, 0.0, 0.0, 0.11791706085205078, 0.0, 0.0, -0.22200379371643067, 0.0, 0.5359618663787842, + 0.0, 0.3999775886535644, -0.11797528266906737, 0.0, 0.0, 0.0, 0.0, 0.2759805679321289, + 0.0, 0.0, -0.4640931129455566, 0.0, 0.22199563980102538, 0.09600462913513183, -0.37601265907287595, + 0.031966781616210936, 0.0, 0.0, -0.02399768829345703, 0.0, -0.2760138988494873, + -0.26015467643737794, 0.0, -0.5380242347717284, 0.0, 0.0, 0.0, 0.0, 0.5258837699890136, + -0.49402198791503904, -0.4300229072570801, 0.0, 0.0, 0.37598676681518556, 0.0, -0.15797080993652343, + 0.0, 0.4800061225891113, 0.0, 0.0, 0.0, -0.15400257110595703, -0.39800362586975097, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.2919839859008789, 0.0, 0.0, 0.09398159980773925, + 0.0, 0.0, -0.2760395050048828, 0.0, 0.0, -0.5821004390716552, 0.0, 0.46998925209045406, + 0.1279585361480713, 0.407993745803833, -0.33404088020324707, 0.0, 0.0, 0.07588691711425781, + 0.5299568653106689, 0.0, -0.35857114791870115, -0.10000262260437011, 0.003998422622680664, + 0.0, 0.0, 0.0, 0.0, -0.010011434555053711, -0.2560285091400146, -0.12202091217041015, + 0.6020134449005127, -0.08200879096984863, 0.0, -0.4919833660125732, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, -0.47599811553955074, 0.03193230628967285, 0.0, -0.1920156955718994, + 0.31595921516418457, -0.21797704696655273, 0.0, 0.0, -0.2700041770935058, 0.0, 0.0, + 0.0, -0.22822351455688475, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5620501041412354, 0.4199911594390869, + -0.2520113468170166, 0.22597560882568357, 0.0, 0.2799236297607422, -0.04807877540588379, + 0.0, 0.13398656845092774, 0.0, 0.3419942378997803, -0.31405792236328123, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.2280109405517578, 0.0, 0.0, 0.4000090599060058, 0.17795076370239257, + 0.09999446868896485, 0.0, 0.03599681854248047, 0.0, -0.4380466461181641, 0.0, -0.1100419521331787, + 0.0, 0.0, -0.17595276832580567, -0.3059937000274658, 0.0, 0.0, 0.0, 0.1320566654205322, + 0.0, 0.0, 0.0, 0.0, -0.3560299873352051, 0.0, 0.0, 0.0, 0.0018931388854980468, 0.0, + 0.10197687149047852, 0.0, 0.0, 0.1119173526763916, 0.0, 0.0, 0.3779489994049072, + 0.0, 0.0, 0.5979911327362061, 0.0, -0.4339719772338867, 0.0, 0.0, 0.0, 0.0, 0.5879703998565674, + 0.44398112297058107, 0.23200421333312987, -0.5799877166748046, 0.5160007953643798, + 0.0, 0.14798226356506347, 0.0, -0.35401082038879395, 0.21592626571655274, 0.0, 0.0, + 0.0, -0.34604787826538086, 0.0, 0.0, 0.38199477195739745, -0.14603962898254394, + -0.43001089096069334, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.2380439758300781, + 0.0, 0.0, 0.0, -0.2440260887145996, -0.09001507759094238, 0.0, 0.0, 0.0, 0.0, -0.27601075172424316, + 0.05383443832397461, -0.5041153907775878, 
0.23196072578430174, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.4239924430847168, -0.5580485343933105, 0.47000484466552733, 0.0, 0.0, -0.07206115722656249, + 0.5819111824035644, 0.0, 0.0, 0.5719905853271484, 0.0, 0.0, 0.49197549819946285, + 0.0, 0.0, 0.563999605178833, 0.02599024772644043, -0.2719888687133789, 0.3739737510681152, + 0.0, -0.25997371673583985, 0.08602566719055176, 0.0, 0.0, 0.0, -0.26804609298706056, + -0.21595330238342283, 0.0, 0.3819248199462891, 0.0, 0.0, -0.2120427131652832, -0.48602614402770994, + -0.023989534378051756, 0.0, 0.0, 0.1461038589477539, 0.0, -0.3959640026092529, 0.0, + -0.1980062484741211, 0.35798234939575196, 0.0, 0.0, -0.2821011543273926, 0.32799067497253415, + 0.0, 0.0, 0.0, -0.31809210777282715, -0.20601353645324708, 0.0, -0.05204057693481445, + 0.0, 0.0, 0.0, 0.0, -0.420014476776123, 0.0, 0.0, -0.3000040054321289, -0.5660556793212891, + 0.0, 0.0, 0.277996301651001, -0.2700076103210449, 0.22398219108581544, 0.0, 0.0, + -0.3379972457885742, 0.0, 0.0, 0.0, -0.35397634506225584, -0.4540220260620117, 0.0, + 0.24392709732055662, 0.4321130275726318, 0.0, 0.0599743366241455, 0.13202261924743652, + 0.4859583377838135, -0.3940425395965576, 0.0, 0.4320270538330078, 0.0, 0.0, 0.0, + -0.33995490074157714, -0.4440197467803955, 0.0, 0.0, -0.48399982452392576, 0.3899245262145996, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.3860053539276123, 0.34215831756591797, 0.0, 0.0, + 0.0, 0.0, -0.23800649642944335, 0.0, -0.536017656326294, -0.3580926418304443, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.3699025154113769, 0.10400547981262206, 0.0, -0.5460023403167724, + 0.16787495613098144, -0.21796588897705077, -0.3180039882659912, 0.0, 0.0, 0.0, 0.0, + 0.3039816856384277, 0.0, 0.0, 0.05196719169616699, 0.0, 0.1760343074798584, 0.13006410598754883, + 0.023965787887573243, 0.0, 0.3639249801635742, 0.3480075359344482, -0.5400272369384765, + 0.0, 0.5219020843505859]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].ScratchPad: 3735928559 
+AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].band: 5 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].bandCenterMHz: 6750.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[5].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 
112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' 
+AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].CryoChannels.amplitudeScaleArray: '[0, + 0, 12, 0, 12, 0, 0, 12, 0, 0, 12, 0, 12, 0, 0, 12, 12, 0, 12, 12, 12, 0, 0, 12, + 12, 0, 0, 0, 12, 12, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, 12, 0, 0, 12, 0, 0, 0, 12, + 0, 0, 12, 12, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, + 12, 0, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 0, + 0, 0, 12, 0, 0, 0, 0, 12, 12, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, + 12, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 12, 0, 12, 12, 0, 12, 0, 0, 0, 0, 0, 12, + 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 12, 12, 12, 0, 12, 0, 0, 0, 12, 12, 12, 0, 12, + 0, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 0, 0, 0, 0, 0, + 0, 12, 12, 12, 0, 0, 12, 0, 0, 12, 12, 12, 12, 0, 0, 12, 0, 0, 12, 12, 0, 0, 12, + 12, 12, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, 12, 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, + 12, 0, 12, 0, 12, 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, 12, 12, 0, 0, + 12, 0, 0, 12, 12, 0, 0, 12, 12, 0, 12, 12, 12, 12, 0, 12, 0, 0, 0, 12, 0, 0, 0, + 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 12, 12, 12, 0, 12, 12, 0, 12, 0, 12, 0, 0, + 12, 0, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 12, 12, 12, 0, 0, + 12, 12, 0, 12, 12, 0, 12, 0, 0, 0, 12, 0, 12, 12, 12, 0, 12, 0, 0, 12, 12, 12, 0, + 0, 12, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 0, 12, 0, 0, 0, + 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 12, 0, 0, 12, 12, 12, 12, 0, + 0, 0, 12, 0, 0, 0, 12, 0, 12, 0, 0, 12, 0, 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 0, + 12, 0, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 12, 0, 12, 0, 0, 0, 0, 12, + 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, + 0, 0, 12, 0, 0, 0, 0, 0, 12, 0, 12, 12, 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0, 12, + 0, 12, 0, 12, 12, 12, 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].CryoChannels.centerFrequencyArray: '[0.0, + 0.0, 0.5039669036865234, 
0.0, -0.38010463714599607, 0.0, 0.0, 0.5918935775756836, + 0.0, 0.0, 0.3539272785186767, 0.0, 0.19201412200927734, 0.0, 0.0, 0.48393688201904295, + -0.1400130271911621, 0.0, 0.08396387100219727, -0.5980549335479736, -0.30208797454833985, + 0.0, 0.0, -0.1500072956085205, 0.513869047164917, 0.0, 0.0, 0.0, 0.5179505825042724, + 0.5699004650115966, 0.0, -0.20811867713928223, 0.0, 0.0, 0.0, -0.030036020278930663, + 0.46192245483398436, -0.47805876731872554, 0.0, 0.3259024143218994, 0.14594578742980957, + 0.0, 0.0, -0.5000454425811768, 0.0, 0.0, 0.0, -0.016034317016601563, 0.0, 0.0, -0.4200171947479248, + 0.3220024108886719, 0.0, 0.22999219894409179, 0.0, 0.0, -0.007995700836181641, 0.2459632873535156, + 0.0, 0.0, 0.0, -0.6420480251312256, 0.0, 0.42393693923950193, -0.32200284004211427, + 0.0, -0.2059998035430908, 0.0, 0.0, 0.0, 0.0, 0.24805426597595215, -0.2320253849029541, + 0.0, 0.0, 0.0, 0.0, 0.1639756679534912, 0.0, 0.0, 0.2499579906463623, 0.0, 0.0, + 0.0, 0.23006515502929686, -0.3219883918762207, 0.0, -0.34404802322387695, 0.0, 0.0, + 0.0, -0.2119896411895752, -0.36801567077636715, 0.0, 0.0, 0.0, 0.0, 0.0, -0.486036729812622, + 0.0, 0.0, 0.0, 0.0, -0.5601399421691894, 0.5259294033050537, 0.0, 0.10400218963623047, + 0.0, -0.1060945987701416, 0.0, 0.0, 0.0, -0.20998950004577635, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, -0.13215250968933104, -0.04195318222045898, 0.205938720703125, 0.0, -0.2560887336730957, + 0.0, 0.0, 0.0, 0.09991450309753418, 0.0, 0.0, 0.4159342288970947, 0.13388214111328126, + 0.0, 0.1416990280151367, 0.0, -0.11607813835144043, 0.21190881729125977, 0.0, -0.21803297996520996, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.3299271583557129, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.13212475776672364, + 0.20984201431274413, -0.14001574516296386, 0.0, 0.2639616966247558, 0.41196441650390625, + -0.13229413032531737, 0.0, -0.0700451374053955, 0.0, 0.0, 0.0, -0.5319876194000244, + -0.3801414012908935, 0.49396233558654784, 0.0, 0.19599509239196777, 0.0, 0.0, 0.5698233604431152, + 0.0, 
0.1839616298675537, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.04600954055786133, + 0.0, 0.3319324493408203, 0.1420063018798828, 0.0, 0.0, 0.3519285678863525, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.14597439765930176, 0.33997049331665036, 0.47595748901367185, + 0.0, 0.0, -0.24804053306579588, 0.0, 0.0, 0.0941162109375, 0.023981809616088867, + -0.040093088150024415, -0.48598365783691405, 0.0, 0.0, -0.4200739860534668, 0.0, + 0.0, 0.07814183235168456, -0.39612135887145994, 0.0, 0.0, -0.4441363334655762, -0.4760693550109863, + 0.5999480724334717, 0.0, 0.0, 0.0, 0.19394960403442382, 0.0, 0.3538634777069092, + 0.5999359130859375, 0.0, 0.0, 0.0, -0.26405396461486813, -0.07607560157775879, 0.0, + 0.3040194511413574, -0.35810179710388185, 0.0, -0.21006174087524412, 0.0, 0.0, 0.0, + 0.0, 0.29393391609191893, -0.2979755401611328, 0.0, 0.43389015197753905, 0.0, 0.5700048923492431, + 0.0, 0.0, 0.0, 0.2080063819885254, -0.028084945678710938, 0.0, 0.0, -0.4700577735900879, + 0.0, 0.0, 0.0, -0.15599341392517088, 0.0, 0.0, 0.0, 0.4679786682128906, -0.5519665718078614, + 0.0, 0.0, 0.07602138519287109, 0.0, 0.0, 0.08594355583190917, 0.5799530982971192, + 0.0, 0.0, 0.3300523281097412, 0.12796425819396973, 0.0, 0.5739562511444092, 0.2119514465332031, + -0.19618964195251465, 0.20598793029785156, 0.0, 0.34396791458129883, 0.0, 0.0, 0.0, + -0.02210097312927246, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1299147605895996, 0.0, 0.0, + -0.17796220779418945, 0.0, 0.0, 0.0, 0.0, 0.023998403549194334, 0.2939643859863281, + -0.22413768768310546, -0.38799176216125486, 0.0, 0.34978466033935546, 0.4358675479888916, + 0.0, 0.5159083843231201, 0.0, -0.11005825996398926, 0.0, 0.0, -0.362166166305542, + 0.0, 0.17999639511108398, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0779911994934082, 0.33191914558410646, + 0.0, 0.0, 0.3039876937866211, 0.5241703033447266, 0.0, 0.0, 0.0, -0.1920168399810791, + 0.0, 0.1680068492889404, -0.5820167541503906, -0.34809236526489257, 0.0, 0.0, -0.05815801620483398, + 0.24593009948730468, 0.0, 
-0.4440482139587402, -0.5439677238464355, 0.0, 0.17398467063903808, + 0.0, 0.0, 0.0, -0.4520692348480224, 0.0, 0.2999831199645996, 0.25598831176757814, + 0.02594475746154785, 0.0, -0.07608675956726074, 0.0, 0.0, -0.14205293655395507, + -0.4120110511779785, -0.5640522480010987, 0.0, 0.0, 0.02793917655944824, 0.0, 0.0, + 0.0, 0.021985673904418943, 0.4358993053436279, -0.2000291347503662, 0.0, 0.0, 0.0, + 0.0, -0.15009841918945313, 0.0, 0.0, 0.0, 0.0, 0.4580826759338379, 0.0, 0.0, 0.0, + 0.5619380950927734, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.19198451042175294, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.2559103488922119, 0.10997242927551269, 0.025971221923828124, + 0.0, 0.0, 0.406047248840332, 0.0, 0.0, 0.4758736610412597, 0.219871187210083, -0.2340830326080322, + 0.2840170383453369, 0.0, 0.0, 0.0, 0.333984375, 0.0, 0.0, 0.0, 0.3119520664215088, + 0.0, 0.14195451736450196, 0.0, 0.0, -0.47000284194946285, 0.0, 0.5578781604766846, + 0.0, 0.0, 0.0, -0.18594818115234374, 0.0, 0.0, 0.4679075717926025, 0.0, 0.0, 0.0, + 0.0, 0.2680246353149414, 0.0, 0.15795607566833494, 0.0, -0.36006803512573243, 0.48595504760742186, + 0.0, 0.0, 0.30400586128234863, -0.5100378513336181, 0.0, 0.0, 0.0, 0.0, 0.0, -0.36997117996215817, + -0.3999964714050293, 0.0, -0.201904821395874, 0.0, 0.0, 0.0, 0.0, 0.4280223369598389, + 0.0, 0.0, 0.5640488147735595, 0.0, 0.46211442947387693, 0.0, 0.0, -0.4360799789428711, + 0.0, 0.0, 0.0, 0.0, -0.4600188732147217, 0.0, 0.0, 0.08798260688781738, -0.20009236335754393, + 0.0, 0.0, -0.4339606761932373, 0.17992701530456542, 0.0, 0.0, 0.0, 0.0, 0.0, -0.44997324943542477, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.3179603576660156, 0.0, 0.33995690345764157, 0.5119348526000976, + 0.1379377841949463, 0.0, 0.0, 0.0, 0.3697862148284912, 0.0, 0.0, 0.5099264144897461, + 0.0, 0.0, 0.0, -0.40003023147583006, 0.0, 0.3819399833679199, 0.0, 0.20797934532165527, + -0.07608790397644043, -9.012222290039062e-06, 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].ScratchPad: 3735928559 
+AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].band: 6 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].bandCenterMHz: 7250.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[6].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 
112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, -270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' 
+AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].CryoChannels.amplitudeScaleArray: '[12, + 0, 0, 12, 0, 12, 0, 0, 12, 0, 12, 12, 0, 12, 0, 12, 12, 0, 0, 12, 0, 12, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 12, 0, 0, + 0, 0, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, 12, 0, 12, 0, 12, 0, 0, + 0, 12, 0, 12, 0, 12, 12, 0, 0, 12, 12, 12, 0, 12, 12, 0, 0, 0, 0, 12, 0, 12, 0, + 0, 12, 0, 12, 12, 0, 12, 12, 0, 0, 12, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 0, 12, 0, + 12, 0, 12, 0, 0, 0, 0, 12, 0, 12, 0, 0, 0, 0, 12, 12, 0, 12, 0, 12, 12, 0, 12, 0, + 0, 0, 0, 12, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 12, 12, 0, 12, 0, 12, 12, 0, 0, 0, + 0, 0, 0, 0, 12, 0, 12, 0, 0, 12, 0, 0, 0, 0, 0, 12, 12, 0, 0, 12, 0, 0, 12, 0, 0, + 0, 12, 12, 0, 0, 12, 0, 0, 12, 0, 0, 12, 0, 12, 12, 0, 12, 12, 12, 12, 0, 12, 0, + 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, 12, 0, 0, 12, + 0, 12, 0, 0, 12, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 0, 0, 12, 12, 0, 0, 0, 0, 0, + 0, 0, 12, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 12, 0, 0, 0, 12, 0, 0, 12, 0, + 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 12, 0, 0, 12, 12, 0, 12, 0, 12, 12, 0, 12, + 0, 12, 0, 12, 0, 12, 0, 0, 0, 0, 0, 0, 12, 12, 0, 12, 12, 0, 12, 12, 12, 0, 0, 12, + 0, 0, 12, 12, 12, 12, 0, 0, 0, 12, 0, 12, 0, 0, 0, 12, 12, 0, 12, 12, 0, 0, 0, 0, + 0, 0, 0, 0, 12, 12, 0, 12, 12, 0, 12, 12, 12, 0, 0, 12, 12, 12, 0, 0, 12, 12, 0, + 0, 0, 0, 0, 12, 12, 12, 0, 0, 0, 0, 12, 12, 12, 12, 0, 0, 0, 0, 12, 12, 0, 0, 0, + 0, 12, 12, 0, 12, 0, 12, 0, 0, 12, 0, 12, 12, 0, 0, 0, 12, 12, 0, 0, 12, 12, 0, + 0, 0, 12, 0, 0, 0, 0, 12, 0, 12, 12, 0, 0, 12, 12, 12, 0, 12, 0, 0, 12, 12, 0, 12, + 0, 0, 12, 0, 12, 12, 12, 12, 0, 0, 0, 0, 0, 0, 0, 12, 0, 12, 12, 12, 0, 0, 0, 12, + 0, 0, 0, 0, 12, 12, 12, 12, 0, 0, 12, 0, 0, 12, 12, 12, 0, 12, 12, 0, 0, 12, 12, + 12, 0, 0, 0, 12, 0, 0, 12, 0, 0, 0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].CryoChannels.centerFrequencyArray: '[-0.2301185131072998, + 0.0, 
0.0, -0.15607938766479493, 0.0, -0.24399962425231933, 0.0, 0.0, 0.4776723861694336, + 0.0, 0.4180292129516601, -0.032012987136840816, 0.0, 0.5399136543273926, 0.0, 0.07378950119018554, + -0.46209053993225097, 0.0, 0.0, 0.4417738437652588, 0.0, 0.13999271392822266, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1979729175567627, 0.549858283996582, + -0.006031036376953125, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.19591341018676758, 0.25798444747924804, + 0.0, -0.5679967403411865, 0.0, 0.0, 0.0, 0.0, -0.27600917816162107, 0.0, 0.0, 0.0, + -0.17799110412597655, 0.0, 0.0, -0.24803338050842283, -0.5319917678833007, 0.0, + 0.0, 0.5840289115905761, 0.0, 0.0, -0.5839706897735596, 0.4779839515686035, 0.0, + 0.008004426956176758, 0.0, -0.3099816799163818, 0.0, 0.0, 0.0, 0.015988969802856443, + 0.0, -0.41199474334716796, 0.0, -0.020050621032714842, -0.3780919075012207, 0.0, + 0.0, -0.49403672218322753, -0.12602033615112304, -0.4320071697235107, 0.0, 0.3519742012023926, + 0.48991742134094235, 0.0, 0.0, 0.0, 0.0, 0.250010347366333, 0.0, -0.5800901412963867, + 0.0, 0.0, 0.003912448883056641, 0.0, -0.4580477714538574, 0.4279036045074463, 0.0, + 0.13187499046325682, 0.17000770568847656, 0.0, 0.0, -0.36193127632141114, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, -0.31604862213134766, 0.0, -0.166005277633667, 0.1800333023071289, + 0.0, -0.024072647094726562, 0.0, 0.028005695343017577, 0.0, 0.2379890441894531, + 0.0, 0.0, 0.0, 0.0, -0.3844604015350342, 0.0, 0.027923011779785154, 0.0, 0.0, 0.0, + 0.0, -0.4480114459991455, -0.2682154655456543, 0.0, -0.4860175609588623, 0.0, 0.05992670059204101, + -0.1239781379699707, 0.0, 0.23189249038696289, 0.0, 0.0, 0.0, 0.0, 0.04796633720397949, + -0.4340044498443603, 0.0, 0.0, 0.0, 0.0, 0.0, -0.26601405143737794, -0.02209639549255371, + 0.0, 0.0, -0.09609174728393555, -0.4820354461669922, 0.0, -0.03411641120910645, + 0.0, -0.35001382827758787, 0.3813953876495361, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.14395136833190916, 0.0, -0.2440183639526367, 
0.0, 0.0, -0.21807188987731932, 0.0, + 0.0, 0.0, 0.0, 0.0, -0.3680415630340576, -0.1262665271759033, 0.0, 0.0, -0.4360048770904541, + 0.0, 0.0, 0.18200898170471191, 0.0, 0.0, 0.0, 0.1519723892211914, -0.3499244213104248, + 0.0, 0.0, 0.536053705215454, 0.0, 0.0, 0.19802069664001465, 0.0, 0.0, -0.32405920028686525, + 0.0, 0.52396559715271, 0.06805629730224609, 0.0, -0.49397907257080076, -0.16387495994567872, + 0.19793744087219237, 0.4679431915283203, 0.0, -0.04404158592224121, 0.0, 0.0, 0.0, + 0.0, 0.0, -0.47609238624572753, 0.0, 0.49793272018432616, 0.3359549045562744, 0.0, + 0.0, 0.0, -0.580100154876709, 0.0, 0.0, -0.5221139430999756, 0.4539821147918701, + 0.0, 0.0, 0.0, -0.48798379898071287, -0.37797975540161133, 0.0, 0.0, -0.31406264305114745, + 0.0, 0.5279717445373535, 0.0, 0.0, 0.37993426322937013, 0.0, 0.0, 0.0, -0.01204833984375, + 0.0, 0.25196056365966796, -0.23605785369873045, 0.0, 0.0, -0.5601523876190185, 0.0, + 0.0, -0.5100384235382079, 0.46376953125, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.22591867446899414, + -0.5699890136718749, 0.0, 0.0, -0.13402175903320312, 0.19604015350341797, 0.0, 0.0, + 0.0, -0.5820271968841553, 0.0, 0.0, 0.21997318267822266, -0.22393555641174315, 0.0, + 0.0, 0.0, -0.12199072837829589, 0.0, 0.0, -0.4400221824645996, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.3300431728363037, 0.0, -0.3959661483764648, 0.09200763702392578, 0.0, 0.0, + -0.09799032211303711, 0.3660275459289551, 0.0, 0.0, -0.3840036392211914, -0.45807838439941406, + 0.0, -0.15603947639465332, 0.0, -0.13202233314514158, 0.05595216751098633, 0.0, + -0.00809183120727539, 0.0, -0.46390285491943356, 0.0, 0.10997128486633301, 0.0, + -0.384027099609375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.5701427936553954, 0.5040065288543701, + 0.0, -0.13406596183776856, -0.5900799751281738, 0.0, -0.5860655307769775, 0.28993906974792477, + -0.6000061511993408, 0.0, 0.0, 0.031925296783447264, 0.0, 0.0, -0.19603829383850097, + 0.08582239151000977, -0.2960050106048584, -0.09795584678649902, 0.0, 0.0, 0.0, 
-0.11003608703613281, + 0.0, -0.2580536842346191, 0.0, 0.0, 0.0, -0.11206483840942383, 0.2079627513885498, + 0.0, 0.5620273590087891, -0.45375809669494627, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, -0.4919899463653564, -0.5200714588165283, 0.0, 0.397979736328125, 0.37598090171813964, + 0.0, -0.4360645294189453, 0.03794031143188476, 0.5999460697174072, 0.0, 0.0, 0.46196866035461426, + 0.4918593406677246, -0.2338557243347168, 0.0, 0.0, -0.34403285980224607, -0.5740236282348633, + 0.0, 0.0, 0.0, 0.0, 0.0, -0.03804559707641601, -0.05803184509277343, -0.5480097770690918, + 0.0, 0.0, 0.0, 0.0, 0.1119532585144043, -0.10989117622375488, 0.013919878005981445, + 0.13800601959228515, 0.0, 0.0, 0.0, 0.0, 0.16797008514404296, -0.4400623798370361, + 0.0, 0.0, 0.0, 0.0, 0.1819507598876953, 0.5400060653686524, 0.0, -0.010105276107788086, + 0.0, -0.2320007801055908, 0.0, 0.0, 0.30600900650024415, 0.0, 0.46599283218383786, + 0.5058833599090576, 0.0, 0.0, 0.0, -0.3720407009124756, 0.12396755218505859, 0.0, + 0.0, -0.17004432678222656, 0.01993732452392578, 0.0, 0.0, 0.0, -0.08998904228210448, + 0.0, 0.0, 0.0, 0.0, -0.4022345066070557, 0.0, -0.03607220649719238, -0.35866355895996094, + 0.0, 0.0, -0.33205175399780273, -0.1961513042449951, 0.4657733917236328, 0.0, 0.05395717620849609, + 0.0, 0.0, 0.3879518508911133, 0.046004962921142575, 0.0, -0.5880527973175048, 0.0, + 0.0, 0.5899269104003906, 0.0, -0.19203858375549315, -0.3799890518188476, -0.4660473346710205, + -0.25206527709960935, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1939631938934326, 0.0, + 0.3279550552368164, 0.19196391105651855, -0.10207257270812987, 0.0, 0.0, 0.0, 0.5639925956726074, + 0.0, 0.0, 0.0, 0.0, 0.2860107421875, 0.06384100914001464, -0.522075891494751, -0.41202378273010254, + 0.0, 0.0, 0.3659628868103027, 0.0, 0.0, -0.43611845970153806, 0.5719902992248534, + 0.4818826675415039, 0.0, -0.11006054878234862, -0.06198635101318359, 0.0, 0.0, -0.48401498794555664, + 0.08582124710083007, 0.2339292526245117, 0.0, 0.0, 0.0, 
0.019986820220947266, 0.0, + 0.0, -0.33008694648742676, 0.0, 0.0, 0.0]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].ScratchPad: 3735928559 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].band: 7 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].bandCenterMHz: 7750.0 +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.Base[7].toneFrequencyOffsetMHz: '[0.0, + 307.2, 153.6, -153.6, 76.8, -230.4, 230.4, -76.8, 38.4, -268.8, 192.0, -115.2, 115.2, + -192.0, 268.8, -38.4, 19.2, -288.0, 172.8, -134.4, 96.0, -211.2, 249.6, -57.6, 57.6, + -249.6, 211.2, -96.0, 134.4, -172.8, 288.0, -19.2, 9.6, -297.6, 163.2, -144.0, 86.4, + -220.8, 240.0, -67.2, 48.0, -259.2, 201.6, -105.6, 124.8, -182.4, 278.4, -28.8, + 28.8, -278.4, 182.4, -124.8, 105.6, -201.6, 259.2, -48.0, 67.2, -240.0, 220.8, -86.4, + 144.0, -163.2, 297.6, -9.6, 4.8, -302.4, 158.4, -148.8, 81.6, -225.6, 235.2, -72.0, + 43.2, -264.0, 196.8, -110.4, 120.0, -187.2, 273.6, -33.6, 24.0, -283.2, 177.6, -129.6, + 100.8, -206.4, 254.4, -52.8, 62.4, -244.8, 216.0, -91.2, 139.2, -168.0, 292.8, -14.4, + 14.4, -292.8, 168.0, -139.2, 91.2, -216.0, 244.8, -62.4, 52.8, -254.4, 206.4, -100.8, + 129.6, -177.6, 283.2, -24.0, 33.6, -273.6, 187.2, -120.0, 110.4, -196.8, 264.0, + -43.2, 72.0, -235.2, 225.6, -81.6, 148.8, -158.4, 302.4, -4.8, 2.4, -304.8, 156.0, + -151.2, 79.2, -228.0, 232.8, -74.4, 40.8, -266.4, 194.4, -112.8, 117.6, -189.6, + 271.2, -36.0, 21.6, -285.6, 175.2, -132.0, 98.4, -208.8, 252.0, -55.2, 60.0, -247.2, + 213.6, -93.6, 136.8, -170.4, 290.4, -16.8, 12.0, -295.2, 165.6, -141.6, 88.8, -218.4, + 242.4, -64.8, 50.4, -256.8, 204.0, -103.2, 127.2, -180.0, 280.8, -26.4, 31.2, -276.0, + 184.8, -122.4, 108.0, -199.2, 261.6, -45.6, 69.6, -237.6, 223.2, -84.0, 146.4, -160.8, + 300.0, -7.2, 7.2, -300.0, 160.8, -146.4, 84.0, -223.2, 237.6, -69.6, 45.6, -261.6, + 199.2, -108.0, 122.4, -184.8, 276.0, -31.2, 26.4, -280.8, 180.0, -127.2, 103.2, + -204.0, 256.8, -50.4, 64.8, -242.4, 218.4, -88.8, 141.6, -165.6, 295.2, -12.0, 
16.8, + -290.4, 170.4, -136.8, 93.6, -213.6, 247.2, -60.0, 55.2, -252.0, 208.8, -98.4, 132.0, + -175.2, 285.6, -21.6, 36.0, -271.2, 189.6, -117.6, 112.8, -194.4, 266.4, -40.8, + 74.4, -232.8, 228.0, -79.2, 151.2, -156.0, 304.8, -2.4, 1.2, -306.0, 154.8, -152.4, + 78.0, -229.2, 231.6, -75.6, 39.6, -267.6, 193.2, -114.0, 116.4, -190.8, 270.0, -37.2, + 20.4, -286.8, 174.0, -133.2, 97.2, -210.0, 250.8, -56.4, 58.8, -248.4, 212.4, -94.8, + 135.6, -171.6, 289.2, -18.0, 10.8, -296.4, 164.4, -142.8, 87.6, -219.6, 241.2, -66.0, + 49.2, -258.0, 202.8, -104.4, 126.0, -181.2, 279.6, -27.6, 30.0, -277.2, 183.6, -123.6, + 106.8, -200.4, 260.4, -46.8, 68.4, -238.8, 222.0, -85.2, 145.2, -162.0, 298.8, -8.4, + 6.0, -301.2, 159.6, -147.6, 82.8, -224.4, 236.4, -70.8, 44.4, -262.8, 198.0, -109.2, + 121.2, -186.0, 274.8, -32.4, 25.2, -282.0, 178.8, -128.4, 102.0, -205.2, 255.6, + -51.6, 63.6, -243.6, 217.2, -90.0, 140.4, -166.8, 294.0, -13.2, 15.6, -291.6, 169.2, + -138.0, 92.4, -214.8, 246.0, -61.2, 54.0, -253.2, 207.6, -99.6, 130.8, -176.4, 284.4, + -22.8, 34.8, -272.4, 188.4, -118.8, 111.6, -195.6, 265.2, -42.0, 73.2, -234.0, 226.8, + -80.4, 150.0, -157.2, 303.6, -3.6, 3.6, -303.6, 157.2, -150.0, 80.4, -226.8, 234.0, + -73.2, 42.0, -265.2, 195.6, -111.6, 118.8, -188.4, 272.4, -34.8, 22.8, -284.4, 176.4, + -130.8, 99.6, -207.6, 253.2, -54.0, 61.2, -246.0, 214.8, -92.4, 138.0, -169.2, 291.6, + -15.6, 13.2, -294.0, 166.8, -140.4, 90.0, -217.2, 243.6, -63.6, 51.6, -255.6, 205.2, + -102.0, 128.4, -178.8, 282.0, -25.2, 32.4, -274.8, 186.0, -121.2, 109.2, -198.0, + 262.8, -44.4, 70.8, -236.4, 224.4, -82.8, 147.6, -159.6, 301.2, -6.0, 8.4, -298.8, + 162.0, -145.2, 85.2, -222.0, 238.8, -68.4, 46.8, -260.4, 200.4, -106.8, 123.6, -183.6, + 277.2, -30.0, 27.6, -279.6, 181.2, -126.0, 104.4, -202.8, 258.0, -49.2, 66.0, -241.2, + 219.6, -87.6, 142.8, -164.4, 296.4, -10.8, 18.0, -289.2, 171.6, -135.6, 94.8, -212.4, + 248.4, -58.8, 56.4, -250.8, 210.0, -97.2, 133.2, -174.0, 286.8, -20.4, 37.2, 
-270.0, + 190.8, -116.4, 114.0, -193.2, 267.6, -39.6, 75.6, -231.6, 229.2, -78.0, 152.4, -154.8, + 306.0, -1.2]' +AMCc.FpgaTopLevel.AppTop.AppCore.SysgenCryo.tuneFilePath: /data/smurf_data/tune/1648246303_tune.npy +AMCc.FpgaTopLevel.AppTop.AppCore.enableStreaming: 1 +AMCc.RogueDirectory: /usr/local/src/rogue/python/pyrogue +AMCc.RogueVersion: v4.11.8 +AMCc.SmurfProcessor.ChannelMapper.Mask: '[2, 7, 10, 11, 12, 15, 16, 19, 20, 21, 24, + 27, 29, 31, 35, 36, 37, 39, 44, 48, 50, 53, 55, 56, 59, 61, 63, 67, 71, 72, 75, + 76, 82, 85, 87, 88, 89, 92, 93, 95, 96, 98, 100, 101, 103, 107, 108, 109, 112, 114, + 116, 119, 124, 125, 128, 130, 132, 135, 138, 139, 140, 143, 144, 146, 147, 149, + 155, 162, 163, 164, 165, 167, 168, 171, 172, 178, 183, 185, 187, 188, 189, 191, + 194, 195, 196, 199, 200, 207, 208, 210, 212, 215, 217, 221, 223, 224, 227, 228, + 232, 236, 237, 240, 242, 247, 249, 251, 252, 256, 260, 266, 275, 276, 280, 281, + 283, 285, 287, 290, 291, 292, 296, 299, 301, 303, 304, 306, 309, 311, 313, 316, + 317, 319, 322, 324, 330, 331, 332, 335, 336, 338, 339, 344, 345, 351, 355, 356, + 357, 360, 363, 368, 370, 372, 377, 379, 381, 383, 386, 388, 394, 395, 396, 399, + 400, 403, 404, 408, 409, 413, 415, 418, 419, 421, 423, 424, 427, 429, 432, 435, + 443, 444, 445, 448, 450, 455, 456, 459, 460, 463, 464, 467, 469, 472, 477, 480, + 485, 491, 492, 493, 495, 499, 500, 503, 505, 507, 508, 514, 516, 519, 522, 523, + 524, 525, 530, 531, 532, 533, 535, 539, 540, 541, 546, 549, 555, 556, 563, 565, + 569, 580, 582, 583, 586, 587, 588, 595, 596, 597, 601, 603, 605, 610, 611, 615, + 619, 620, 621, 626, 629, 635, 637, 642, 645, 650, 652, 653, 658, 661, 665, 669, + 675, 676, 679, 682, 683, 684, 690, 697, 701, 706, 707, 708, 709, 711, 714, 715, + 724, 729, 731, 733, 739, 740, 741, 743, 747, 748, 756, 757, 764, 765, 770, 773, + 775, 778, 779, 780, 787, 788, 795, 797, 802, 803, 807, 810, 811, 818, 819, 821, + 825, 829, 834, 836, 837, 839, 842, 844, 851, 852, 853, 857, 861, 866, 868, 870, 
+ 871, 875, 877, 882, 889, 891, 892, 898, 903, 907, 915, 916, 917, 923, 930, 932, + 934, 938, 946, 953, 957, 962, 963, 964, 967, 971, 972, 973, 979, 981, 989, 994, + 996, 997, 998, 1004, 1005, 1010, 1012, 1013, 1017, 1019, 1021, 1024, 1029, 1036, + 1040, 1043, 1044, 1047, 1049, 1051, 1052, 1055, 1060, 1067, 1068, 1069, 1071, 1076, + 1077, 1080, 1085, 1087, 1088, 1092, 1095, 1096, 1100, 1103, 1108, 1111, 1115, 1117, + 1123, 1124, 1125, 1127, 1128, 1133, 1141, 1143, 1144, 1145, 1147, 1152, 1156, 1157, + 1159, 1160, 1167, 1171, 1172, 1175, 1179, 1183, 1188, 1189, 1192, 1195, 1197, 1199, + 1200, 1204, 1205, 1211, 1215, 1223, 1227, 1228, 1229, 1232, 1240, 1245, 1247, 1248, + 1251, 1253, 1256, 1260, 1261, 1264, 1267, 1268, 1269, 1271, 1273, 1279, 1280, 1283, + 1284, 1287, 1291, 1292, 1293, 1295, 1296, 1299, 1301, 1303, 1304, 1305, 1307, 1308, + 1309, 1317, 1319, 1320, 1324, 1328, 1335, 1336, 1337, 1339, 1344, 1348, 1349, 1355, + 1357, 1360, 1363, 1364, 1365, 1367, 1368, 1369, 1371, 1372, 1375, 1376, 1380, 1383, + 1387, 1388, 1391, 1392, 1396, 1400, 1403, 1405, 1407, 1411, 1419, 1420, 1421, 1424, + 1427, 1429, 1432, 1433, 1436, 1437, 1439, 1443, 1447, 1451, 1452, 1453, 1456, 1461, + 1463, 1464, 1469, 1471, 1475, 1476, 1477, 1479, 1480, 1487, 1491, 1492, 1493, 1495, + 1501, 1508, 1511, 1512, 1515, 1517, 1519, 1520, 1523, 1529, 1531, 1533, 1538, 1540, + 1546, 1547, 1548, 1552, 1554, 1555, 1560, 1568, 1570, 1572, 1575, 1576, 1579, 1580, + 1581, 1584, 1586, 1588, 1592, 1595, 1596, 1597, 1599, 1603, 1604, 1607, 1619, 1623, + 1628, 1635, 1636, 1643, 1644, 1648, 1651, 1659, 1660, 1661, 1666, 1668, 1672, 1675, + 1679, 1680, 1684, 1691, 1695, 1696, 1698, 1700, 1703, 1704, 1708, 1709, 1711, 1715, + 1716, 1724, 1727, 1730, 1731, 1732, 1739, 1741, 1743, 1752, 1755, 1757, 1759, 1762, + 1764, 1771, 1775, 1778, 1780, 1784, 1787, 1791, 1795, 1799, 1802, 1804, 1808, 1810, + 1811, 1812, 1816, 1821, 1827, 1832, 1835, 1842, 1851, 1853, 1858, 1860, 1863, 1867, + 1868, 1869, 1871, 1874, 1883, 
1884, 1885, 1887, 1888, 1891, 1895, 1901, 1903, 1906, + 1907, 1908, 1911, 1916, 1917, 1919, 1922, 1923, 1927, 1930, 1931, 1932, 1933, 1938, + 1940, 1948, 1949, 1955, 1956, 1959, 1963, 1964, 1965, 1967, 1970, 1972, 1979, 1984, + 1991, 1996, 2000, 2002, 2004, 2007, 2008, 2012, 2013, 2019, 2020, 2027, 2029, 2032, + 2035, 2037, 2039, 2040, 2043, 2044, 2045, 2048, 2052, 2055, 2060, 2067, 2068, 2072, + 2077, 2079, 2082, 2083, 2087, 2091, 2092, 2093, 2095, 2099, 2100, 2101, 2103, 2104, + 2105, 2107, 2108, 2109, 2114, 2115, 2116, 2119, 2120, 2122, 2123, 2127, 2128, 2132, + 2133, 2135, 2136, 2137, 2139, 2140, 2141, 2151, 2152, 2160, 2163, 2165, 2167, 2169, + 2171, 2172, 2175, 2176, 2179, 2181, 2187, 2188, 2191, 2194, 2197, 2200, 2201, 2203, + 2204, 2207, 2208, 2211, 2212, 2213, 2215, 2216, 2218, 2219, 2220, 2221, 2223, 2226, + 2227, 2228, 2229, 2235, 2236, 2237, 2240, 2242, 2243, 2244, 2245, 2247, 2248, 2252, + 2255, 2256, 2261, 2263, 2264, 2269, 2271, 2272, 2275, 2276, 2280, 2285, 2287, 2288, + 2290, 2292, 2295, 2301, 2303, 2306, 2307, 2308, 2309, 2311, 2312, 2314, 2315, 2317, + 2319, 2320, 2324, 2325, 2327, 2328, 2331, 2332, 2333, 2338, 2344, 2346, 2352, 2354, + 2359, 2360, 2367, 2368, 2373, 2375, 2379, 2380, 2386, 2387, 2393, 2395, 2399, 2402, + 2403, 2404, 2405, 2407, 2411, 2412, 2413, 2418, 2419, 2420, 2421, 2425, 2427, 2428, + 2429, 2432, 2434, 2436, 2440, 2442, 2444, 2448, 2450, 2452, 2455, 2461, 2463, 2464, + 2466, 2467, 2468, 2469, 2475, 2476, 2479, 2480, 2484, 2487, 2488, 2489, 2493, 2495, + 2496, 2500, 2501, 2504, 2506, 2507, 2512, 2514, 2515, 2516, 2517, 2521, 2523, 2524, + 2527, 2528, 2530, 2533, 2535, 2539, 2540, 2544, 2546, 2547, 2549, 2551, 2553, 2555, + 2560, 2563, 2568, 2571, 2575, 2578, 2579, 2580, 2581, 2583, 2585, 2588, 2591, 2597, + 2600, 2602, 2603, 2604, 2607, 2610, 2611, 2612, 2613, 2615, 2617, 2619, 2623, 2624, + 2631, 2635, 2636, 2637, 2645, 2648, 2651, 2653, 2655, 2658, 2659, 2661, 2662, 2663, + 2664, 2666, 2667, 2669, 2671, 2672, 2674, 2679, 
2681, 2683, 2685, 2688, 2690, 2698, + 2699, 2704, 2707, 2709, 2711, 2712, 2717, 2720, 2722, 2723, 2724, 2725, 2728, 2730, + 2731, 2733, 2738, 2739, 2740, 2743, 2745, 2747, 2751, 2752, 2759, 2762, 2765, 2768, + 2770, 2771, 2772, 2773, 2776, 2777, 2779, 2780, 2781, 2786, 2787, 2788, 2789, 2790, + 2792, 2799, 2800, 2802, 2803, 2804, 2807, 2811, 2818, 2819, 2820, 2821, 2823, 2824, + 2826, 2828, 2829, 2836, 2839, 2840, 2841, 2843, 2845, 2847, 2850, 2851, 2855, 2860, + 2864, 2866, 2869, 2872, 2875, 2877, 2882, 2883, 2884, 2885, 2886, 2888, 2890, 2891, + 2895, 2898, 2899, 2900, 2912, 2916, 2917, 2922, 2923, 2924, 2925, 2931, 2932, 2933, + 2936, 2937, 2940, 2943, 2946, 2947, 2948, 2949, 2951, 2952, 2956, 2957, 2959, 2962, + 2963, 2964, 2967, 2969, 2971, 2972, 2975, 2976, 2980, 2981, 2983, 2988, 2991, 2992, + 2995, 2996, 2997, 3000, 3004, 3005, 3007, 3008, 3010, 3011, 3012, 3013, 3015, 3019, + 3020, 3023, 3024, 3031, 3032, 3037, 3039, 3040, 3047, 3048, 3050, 3051, 3052, 3053, + 3058, 3061, 3063, 3064, 3065, 3067, 3068, 3069, 3071, 3074, 3076, 3079, 3082, 3084, + 3087, 3088, 3090, 3091, 3092, 3095, 3096, 3100, 3101, 3103, 3107, 3108, 3109, 3111, + 3112, 3115, 3119, 3122, 3123, 3125, 3128, 3129, 3133, 3135, 3136, 3138, 3143, 3144, + 3149, 3152, 3156, 3157, 3159, 3163, 3164, 3170, 3175, 3176, 3178, 3180, 3184, 3191, + 3192, 3193, 3195, 3199, 3202, 3203, 3205, 3207, 3208, 3210, 3216, 3223, 3224, 3225, + 3227, 3228, 3229, 3231, 3235, 3236, 3237, 3239, 3242, 3244, 3253, 3255, 3256, 3259, + 3266, 3267, 3268, 3271, 3274, 3275, 3276, 3277, 3280, 3283, 3284, 3287, 3288, 3289, + 3293, 3295, 3296, 3300, 3301, 3303, 3304, 3306, 3311, 3312, 3314, 3316, 3320, 3321, + 3324, 3328, 3332, 3333, 3336, 3339, 3340, 3343, 3344, 3346, 3347, 3348, 3349, 3351, + 3355, 3362, 3365, 3370, 3371, 3372, 3373, 3375, 3376, 3378, 3380, 3383, 3385, 3391, + 3392, 3395, 3396, 3400, 3402, 3403, 3404, 3407, 3408, 3410, 3411, 3413, 3417, 3419, + 3420, 3421, 3423, 3426, 3427, 3428, 3431, 3435, 3436, 3437, 3442, 
3447, 3451, 3460, + 3466, 3467, 3468, 3471, 3474, 3475, 3476, 3477, 3481, 3485, 3487, 3490, 3492, 3496, + 3499, 3504, 3506, 3508, 3509, 3512, 3513, 3519, 3520, 3522, 3527, 3530, 3532, 3535, + 3540, 3543, 3544, 3547, 3548, 3554, 3560, 3562, 3563, 3564, 3568, 3571, 3575, 3577, + 3579, 3580, 3581, 3584, 3587, 3589, 3592, 3594, 3595, 3597, 3599, 3600, 3603, 3605, + 3618, 3619, 3620, 3628, 3629, 3631, 3636, 3640, 3643, 3644, 3647, 3650, 3651, 3653, + 3655, 3659, 3661, 3663, 3664, 3667, 3668, 3669, 3671, 3672, 3677, 3679, 3682, 3684, + 3685, 3687, 3688, 3691, 3698, 3700, 3701, 3703, 3705, 3707, 3712, 3714, 3719, 3720, + 3722, 3724, 3725, 3727, 3732, 3733, 3739, 3740, 3743, 3744, 3746, 3748, 3749, 3757, + 3759, 3762, 3768, 3769, 3772, 3775, 3779, 3780, 3783, 3786, 3789, 3791, 3792, 3794, + 3795, 3796, 3797, 3799, 3805, 3807, 3808, 3812, 3815, 3816, 3820, 3821, 3824, 3826, + 3829, 3833, 3835, 3836, 3839, 3842, 3843, 3851, 3852, 3855, 3856, 3860, 3863, 3864, + 3868, 3871, 3877, 3879, 3880, 3883, 3884, 3887, 3888, 3890, 3892, 3893, 3895, 3897, + 3899, 3901, 3908, 3909, 3911, 3912, 3914, 3915, 3916, 3919, 3922, 3923, 3924, 3925, + 3929, 3931, 3935, 3936, 3938, 3939, 3948, 3949, 3951, 3952, 3954, 3955, 3956, 3959, + 3960, 3961, 3964, 3965, 3971, 3972, 3973, 3978, 3979, 3980, 3981, 3986, 3987, 3992, + 3993, 3995, 3997, 4000, 4002, 4003, 4007, 4008, 4011, 4012, 4016, 4021, 4023, 4024, + 4027, 4028, 4029, 4031, 4034, 4035, 4037, 4040, 4042, 4043, 4044, 4045, 4053, 4055, + 4056, 4057, 4061, 4066, 4067, 4068, 4069, 4072, 4075, 4076, 4077, 4079, 4080, 4083, + 4084, 4085, 4089, 4092]' +AMCc.SmurfProcessor.ChannelMapper.NumChannels: 1562 +AMCc.SmurfProcessor.ChannelMapper.PayloadSize: 0 +AMCc.SmurfProcessor.Downsampler.Disable: false +AMCc.SmurfProcessor.Downsampler.Factor: 20 +AMCc.SmurfProcessor.FileWriter.DataFile: '' +AMCc.SmurfProcessor.FileWriter.IsOpen: false +AMCc.SmurfProcessor.Filter.A: '[1.0, -3.74145562, 5.25726624, -3.28776591, 0.77203984, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0]' +AMCc.SmurfProcessor.Filter.B: '[5.28396689e-06, 2.11358676e-05, 3.17038014e-05, 2.11358676e-05, + 5.28396689e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]' +AMCc.SmurfProcessor.Filter.Disable: false +AMCc.SmurfProcessor.Filter.Gain: 1.0 +AMCc.SmurfProcessor.Filter.Order: 4 +AMCc.SmurfProcessor.FrameRxStats.BadFrameCnt: 0 +AMCc.SmurfProcessor.FrameRxStats.FrameLossCnt: 24 +AMCc.SmurfProcessor.FrameRxStats.FrameOutOrderCnt: 0 +AMCc.SmurfProcessor.PostDataEmulator.Amplitude: 4294967295 +AMCc.SmurfProcessor.PostDataEmulator.Disable: true +AMCc.SmurfProcessor.PostDataEmulator.Offset: 0 +AMCc.SmurfProcessor.PostDataEmulator.Period: 2 +AMCc.SmurfProcessor.PostDataEmulator.Type: Zeros +AMCc.SmurfProcessor.SOStream.dataDropCnt: 0 +AMCc.SmurfProcessor.SOStream.metaDropCnt: 0 +AMCc.SmurfProcessor.SOStream.open_g3stream: 1 +AMCc.SmurfProcessor.SOStream.pysmurf_action: stream_g3_on +AMCc.SmurfProcessor.SOStream.pysmurf_action_timestamp: 1649204943 +AMCc.SmurfProcessor.SOStream.stream_tag: '' diff --git a/agents/smurf_recorder/Dockerfile b/agents/smurf_recorder/Dockerfile deleted file mode 100644 index 01f8932c1..000000000 --- a/agents/smurf_recorder/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -# SOCS SMuRF Recorder - -# Use socs base image -FROM socs:latest - -# Set the working directory to registry directory -WORKDIR /app/socs/agents/smurf_recorder/ -COPY . . 
- -# Run registry on container startup -ENTRYPOINT ["dumb-init", "python3", "-u", "smurf_recorder.py"] - -# Default site-hub -CMD ["--site-hub=ws://crossbar:8001/ws", \ - "--site-http=http://crossbar:8001/call"] diff --git a/agents/smurf_recorder/smurf_recorder.py b/agents/smurf_recorder/smurf_recorder.py deleted file mode 100644 index 9ffa55e8d..000000000 --- a/agents/smurf_recorder/smurf_recorder.py +++ /dev/null @@ -1,222 +0,0 @@ -import os -from os import environ - -import argparse -import txaio - -from socs.agent.smurf_recorder import FrameRecorder - -# For logging -txaio.use_twisted() - -on_rtd = os.environ.get('READTHEDOCS') == 'True' -if not on_rtd: - from ocs import ocs_agent, site_config - - -class SmurfRecorder: - """Recorder for G3 data streams sent over the G3NetworkSender. - - This Agent is built to work with the SMuRF data streamer, which collects - data from a SMuRF blade, packages it into a G3 Frame and sends it over the - network via a G3NetworkSender. - - We read from the specified address with a G3Reader, and write the data to - .g3 files on disk. - - Parameters - ---------- - agent : OCSAgent - OCSAgent object which forms this Agent - time_per_file : int - Amount of time in seconds to have in each file. Files will be rotated - after this many seconds - data_dir : str - Location to write data files to. Subdirectories will be created within - this directory roughly corresponding to one day. - address : str - Address of the host sending the data via the G3NetworkSender - port : int - Port to listen for data on - - Attributes - ---------- - agent : OCSAgent - OCSAgent object which forms this Agent - time_per_file : int - Amount of time in seconds to have in each file. Files will be rotated - after this many seconds. - data_dir : str - Location to write data files to. Subdirectories will be created within - this directory roughly corresponding to one day. - address : str - Address of the host sending the data via the G3NetworkSender. 
- port : int - Port to listen for data on. - is_streaming : bool - Tracks whether or not the recorder is writing to disk. Setting to - false stops the recording of data. - log : txaio.tx.Logger - txaio logger object, created by the OCSAgent - target_rate : float - Target sampling rate for monitored channels. - monitored_channels : list - Readout channels to monitor. Incoming data for these channels will be - downsampled and published to an OCS feed. - """ - - def __init__(self, agent, time_per_file, data_dir, stream_id, - address="localhost", port=4536, target_rate=10): - self.agent = agent - self.time_per_file = time_per_file - self.data_dir = data_dir - self.stream_id = stream_id - self.address = "tcp://{}:{}".format(address, port) - self.is_streaming = False - self.log = self.agent.log - self.target_rate = target_rate - self.monitored_channels = [] - - self.agent.register_feed('detectors', record=True) - - def set_monitored_channels(self, session, params=None): - """Sets channels that the recorder should monitor. - - Args - ----- - channels : List[Int], required - List of channel numbers (integers between 0 and 2048) to be - monitored. A maximum of 6 channels can be monitored at a time. - """ - if params is None: - params = {} - - if len(params['channels']) > 6: - return False, "Cannot monitor more than 6 channels" - - self.monitored_channels = params['channels'] - return True, f"Set monitored channels to {self.monitored_channels}" - - def set_target_rate(self, session, params=None): - """Sets the target sample rate for monitored channels. - - Args - ----- - target_rate : float, required - Target rate after downsampling for monitored channels in Hz. - Max target rate is 10 Hz. 
- """ - if params is None: - params = {} - - if params['target_rate'] > 10: - return False, "Max target rate is 10 Hz" - - self.target_rate = params['target_rate'] - return True, f"Set target sampling rate to {self.target_rate}" - - def start_record(self, session, params=None): - """start_record(params=None) - - OCS Process to start recording SMuRF data. This Process uses - FrameRecorder, which deals with I/O, requiring this process to run in a - worker thread. Be sure to register with blocking=True. - - """ - if params is None: - params = {} - - self.log.info("Data directory set to {}".format(self.data_dir)) - self.log.info("New file every {} seconds".format(self.time_per_file)) - self.log.info("Listening to {}".format(self.address)) - - self.is_streaming = True - - recorder = FrameRecorder(self.time_per_file, self.address, - self.data_dir, self.stream_id, - target_rate=self.target_rate) - - session.set_status('running') - while self.is_streaming: - recorder.monitored_channels = self.monitored_channels - recorder.target_rate = self.target_rate - recorder.run() - for k, v in recorder.stream_data.items(): - if v['timestamps']: - self.agent.publish_to_feed('detectors', v) - - # Explicitly clean up when done - del recorder - - return True, "Finished Recording" - - def stop_record(self, session, params=None): - """stop_record(params=None) - - Stop method associated with start_record process. - - """ - self.is_streaming = False - session.set_status('stopping') - return True, "Stopping Recording" - - -def make_parser(parser=None): - """Build the argument parser for the Agent. Allows sphinx to automatically - build documentation based on this function. - - """ - if parser is None: - parser = argparse.ArgumentParser() - - # Add options specific to this agent. 
- pgroup = parser.add_argument_group("Agent Options") - pgroup.add_argument("--auto-start", default=False, type=bool, - help="Automatically start listening for data at " + - "Agent startup.") - pgroup.add_argument("--time-per-file", default=600, - help="Amount of time in seconds to put in each file.") - pgroup.add_argument("--data-dir", default="/data/", - help="Location of data directory.") - pgroup.add_argument("--port", default=50000, - help="Port to listen on.") - pgroup.add_argument("--address", default="localhost", - help="Address to listen to.") - pgroup.add_argument('--stream-id', default='None', - help="Stream id of recorded stream. If one is not " - "present in the G3Frames, this is the stream-id" - "that will be used to determine file paths.") - pgroup.add_argument('--target-rate', default=10, type=float, - help="Target rate for monitored readout channels in " - "Hz. This willl be the rate that detector data is " - "streamed to an OCS feed") - - return parser - - -if __name__ == "__main__": - # Start logging - txaio.start_logging(level=environ.get("LOGLEVEL", "info")) - - parser = make_parser() - args = site_config.parse_args(agent_class='SmurfRecorder', parser=parser) - - agent, runner = ocs_agent.init_site_agent(args) - listener = SmurfRecorder(agent, - int(args.time_per_file), - args.data_dir, - args.stream_id, - address=args.address, - port=int(args.port), - target_rate=args.target_rate) - - agent.register_process("record", - listener.start_record, - listener.stop_record, - startup=bool(args.auto_start)) - agent.register_task('set_monitored_channels', - listener.set_monitored_channels, blocking=False) - agent.register_task('set_target_rate', listener.set_target_rate, - blocking=False) - - runner.run(agent, auto_reconnect=True) diff --git a/agents/suprsync/Dockerfile b/agents/suprsync/Dockerfile index bb3f92029..e2ee442b4 100644 --- a/agents/suprsync/Dockerfile +++ b/agents/suprsync/Dockerfile @@ -14,7 +14,7 @@ RUN useradd -d /home/cryo -M cryo -u 
1000 && \ # Set the working directory to registry directory WORKDIR /app/socs/agents/suprsync # Copy this agent into the app/agents directory -RUN apt-get install rsync -y +RUN apt-get update && apt-get install -y rsync COPY . . RUN pip3 install -r requirements.txt diff --git a/bin/rename_fields b/bin/rename_fields index 18bb23a0f..a00417d66 100755 --- a/bin/rename_fields +++ b/bin/rename_fields @@ -150,9 +150,8 @@ def _md5sum(path, blocksize=65536): def rename_fields(field_name): """Rename invalid field names.""" # Agents requiring no changes. - # hwp_sim, keithley2230G-psu, pfeiffer_tpg366, pysmurf_archiver, - # pysmurf_controller, pysmurf monitor, smurf_recorder, smurf stream - # simulator, chwp Agent + # keithley2230G-psu, pfeiffer_tpg366, + # pysmurf_controller, pysmurf monitor, smurf stream simulator, chwp Agent # Agents that hardcode their fields: # bluefors, cryomech_cpa diff --git a/docker-compose.yml b/docker-compose.yml index 3514375c1..825bb9ea0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -70,15 +70,6 @@ services: depends_on: - "socs" - # -------------------------------------------------------------------------- - # The Pysmurf Archiver Agent - # -------------------------------------------------------------------------- - ocs-pysmurf-archiver-agent: - image: "ocs-pysmurf-archiver-agent" - build: ./agents/pysmurf_archiver/ - depends_on: - - "socs" - # -------------------------------------------------------------------------- # Smurf stream simulator agent # -------------------------------------------------------------------------- @@ -88,15 +79,6 @@ services: depends_on: - "socs" - # -------------------------------------------------------------------------- - # SMuRF timestream aggregator - # -------------------------------------------------------------------------- - ocs-smurf-recorder: - image: "ocs-smurf-recorder" - build: ./agents/smurf_recorder/ - depends_on: - - "socs" - # 
-------------------------------------------------------------------------- # The Bluefors log tracking Agent # -------------------------------------------------------------------------- @@ -177,6 +159,15 @@ services: build: ./agents/tektronix3021c/ depends_on: - "socs" + + # -------------------------------------------------------------------------- + # Pfeiffer Turbo Controller + # -------------------------------------------------------------------------- + ocs-pfeiffer-tc400-agent: + image: "ocs-pfeiffer-tc400-agent" + build: ./agents/pfeiffer_tc400/ + depends_on: + - "socs" # -------------------------------------------------------------------------- # CHWP Encoder BBB agent @@ -187,6 +178,15 @@ services: depends_on: - "socs" + # -------------------------------------------------------------------------- + # HWP Rotation Agent + # -------------------------------------------------------------------------- + ocs-hwp-rotation-agent: + image: "ocs-hwp-rotation-agent" + build: ./agents/hwp_rotation/ + depends_on: + - "socs" + # -------------------------------------------------------------------------- # Smurf file emulator agent # -------------------------------------------------------------------------- @@ -200,6 +200,15 @@ services: depends_on: - "socs" + # -------------------------------------------------------------------------- + # Magpie agent + # -------------------------------------------------------------------------- + ocs-magpie-agent: + image: "ocs-magpie-agent" + build: ./agents/magpie + depends_on: + - "socs" + # -------------------------------------------------------------------------- # SOCS Simulators # -------------------------------------------------------------------------- diff --git a/docs/_static/images/lyrebird_cmaps.png b/docs/_static/images/lyrebird_cmaps.png new file mode 100644 index 000000000..4daec597f Binary files /dev/null and b/docs/_static/images/lyrebird_cmaps.png differ diff --git a/docs/_static/images/lyrebird_grid.png 
b/docs/_static/images/lyrebird_grid.png new file mode 100644 index 000000000..23c4489e8 Binary files /dev/null and b/docs/_static/images/lyrebird_grid.png differ diff --git a/docs/_static/images/lyrebird_wafer.png b/docs/_static/images/lyrebird_wafer.png new file mode 100644 index 000000000..d22138943 Binary files /dev/null and b/docs/_static/images/lyrebird_wafer.png differ diff --git a/docs/agents/hwp_rotation_agent.rst b/docs/agents/hwp_rotation_agent.rst new file mode 100644 index 000000000..1defd9631 --- /dev/null +++ b/docs/agents/hwp_rotation_agent.rst @@ -0,0 +1,70 @@ +.. highlight:: rst + +.. _hwp_rotation_agent: + +================== +HWP Rotation Agent +================== + +.. argparse:: + :filename: ../agents/hwp_rotation/rotation_agent.py + :func: make_parser + :prog: python3 rotation_agent.py + +Configuration File Examples +--------------------------- + +Below are configuration examples for the ocs config file and for running the +Agent in a docker container. + +OCS Site Config +``````````````` + +An example site-config-file block:: + + {'agent-class': 'RotationAgent', + 'instance-id': 'rotator', + 'arguments': [['--kikusui-ip', '10.10.10.100'], + ['--kikusui-port', '2000'], + ['--pid-ip', '10.10.10.101'], + ['--pid-port', '2001'], + ['--mode', 'iv_acq']]}, + +Docker Compose +`````````````` + +An example docker-compose configuration:: + + ocs-hwp-rotation: + image: simonsobs/ocs-hwp-rotation-agent:latest + hostname: ocs-docker + network_mode: "host" + volumes: + - ${OCS_CONFIG_DIR}:/config:ro + command: + - "--instance-id=hwp-rotation" + - "--site-hub=ws://127.0.0.1:8001/ws" + - "--site-http=http://127.0.0.1:8001/call" + +.. note:: + Since the Agent container needs ``network_mode: "host"``, it must be + configured to connect to the crossbar server as if it was on the host + system. In this example the crossbar server is running on localhost, + ``127.0.0.1``, but on your network this may be different. 
+ +Description +----------- + + +Agent API +--------- + +.. autoclass:: agents.hwp_rotation.rotation_agent.RotationAgent + :members: + +Supporting APIs +--------------- + +.. automodule:: socs.agent.pmx + :members: + :noindex: diff --git a/docs/agents/lakeshore370.rst b/docs/agents/lakeshore370.rst index f2ad78fc7..f7e6f861f 100644 --- a/docs/agents/lakeshore370.rst +++ b/docs/agents/lakeshore370.rst @@ -83,8 +83,8 @@ Agent API .. autoclass:: agents.lakeshore370.LS370_agent.LS370_Agent :members: init_lakeshore_task, start_acq -Driver API ----------- +Supporting APIs +--------------- For the API all methods should start with one of the following: diff --git a/docs/agents/lakeshore372.rst b/docs/agents/lakeshore372.rst index 4f49e7295..adaeb0a7c 100644 --- a/docs/agents/lakeshore372.rst +++ b/docs/agents/lakeshore372.rst @@ -36,11 +36,84 @@ configuration block:: ['--dwell-time-delay', 0], ['--mode', 'acq'], ['--sample-heater', False], - ['--enable-control-chan']]}, + ['--enable-control-chan'], + ['--configfile', 'ls372_config.yaml']]}, + Each device requires configuration under 'agent-instances'. See the OCS site configs documentation for more details. +Lakeshore 372 Config +````````````````````` +A Lakeshore 372 configuration file allows the user to define device and channel +settings (autoscan, enable/disable, calibration curve, etc.) for each Lakeshore +372 in use within one .yaml file. 
Here is an example for how to build a +Lakeshore 372 configuration file:: + + LSA22YG: + device_settings: + autoscan: 'off' + channel: + 1: + enable: 'on' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'on' + dwell: 15 # seconds + pause: 10 # seconds + calibration_curve_num: 23 + temperature_coeff: 'negative' + 2: + enable: 'off' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'off' + dwell: 10 # seconds + pause: 3 # seconds + calibration_curve_num: 28 + temperature_coeff: 'negative' + LSA2761: + device_settings: + autoscan: 'on' + channel: + 1: + enable: 'on' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'on' + dwell: 15 # seconds + pause: 10 # seconds + calibration_curve_num: 33 + temperature_coeff: 'negative' + 2: + enable: 'off' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'off' + dwell: 10 # seconds + pause: 3 # seconds + calibration_curve_num: 36 + temperature_coeff: 'negative' + 3: + enable: 'on' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'on' + dwell: 15 # seconds + pause: 10 # seconds + calibration_curve_num: 34 + temperature_coeff: 'negative' + 4: + enable: 'on' + excitation_mode: 'voltage' + excitation_value: 2.0e-6 + autorange: 'off' + dwell: 10 # seconds + pause: 3 # seconds + calibration_curve_num: 35 + temperature_coeff: 'negative' + + Docker Compose `````````````` diff --git a/docs/agents/lakeshore425.rst b/docs/agents/lakeshore425.rst index 155569271..e6b1c1fb0 100644 --- a/docs/agents/lakeshore425.rst +++ b/docs/agents/lakeshore425.rst @@ -57,3 +57,11 @@ Agent API .. autoclass:: agents.lakeshore425.LS425_agent.LS425Agent :members: + +Supporting APIs +--------------- + +.. automodule:: socs.Lakeshore.Lakeshore425 + :members: + :undoc-members: + diff --git a/docs/agents/magpie.rst b/docs/agents/magpie.rst new file mode 100644 index 000000000..c6460403e --- /dev/null +++ b/docs/agents/magpie.rst @@ -0,0 +1,342 @@ +.. 
highlight:: rst + +.. _magpie: + +=============== +Magpie Agent +=============== + +The magpie is an incredibly intelligent bird, with a decent ability to mimic +other bird calls, though not as good as `the superb lyrebird +`_. In the context of OCS, the job +of the Magpie agent is to take detector data from the SMuRF Streamer and +translate into G3Frames that are compatible with lyrebird. This requires +a small bit of pre-processing and downsampling that is done whenever a new +frame is received. + +.. argparse:: + :filename: ../agents/magpie/magpie_agent.py + :func: make_parser + :prog: python3 magpie_agent.py + +Configuration File Examples +------------------------------ + +Below are example docker-compose and ocs configuration files for running the +magpie agent. + +OCS Site Config +`````````````````` +Below is the site-config entry for the magpie instance we have running at UCSD +on K2SO. We are displaying detectors using a wafer layout, determined by +a detmap csv file, with a target sample rate of 20 Hz:: + + {'agent-class': 'MagpieAgent', + 'instance-id': 'magpie-crate1slot2', + 'arguments': [ + '--stream-id', 'crate1-slot2', + + '--src', 'tcp://localhost:4532', + + '--dest', 8675, + '--delay', 5, + '--target-rate', 20, + + '--layout', 'wafer', + # Detmap CSV file + '--det-map', '/home/jlashner/lyrebird_demo/detmap.csv', + '--offset', 0, 0, + ]}, + +It is possible to tell magpie to stream data from existing G3Files instead of +directly from the smurf-stream. To do this, simply set the ``src`` argument to +be the filepath of the file you wish to stream. If you want to stream from +multiple files in series, you can do this by putting multiple source filepaths, +as is shown below. 
In this case, once the first file has finished streaming +magpie will immediately start streaming from the second file:: + + '--src', '/path/to/file1.g3', '/path/to/file2.g3', + +You can also tell the magpie agent to ignore the src argument all together and generate +fake data by adding the ``--fake-data`` argument. + + +Docker Compose +`````````````````` + +Below is an example docker-compose entry for running a magpie corresponding to +crate-id 1 and slot 2. If crossbar is being run on a different server, you'll +have to modify the ``site-hub`` and ``site-http`` args accordingly:: + + ocs-magpie-crate1slot2: + image: simonsobs/ocs-magpie:${SOCS_TAG} + hostname: ocs-docker + user: ocs:ocs + network_mode: host + volumes: + - ${OCS_CONFIG_DIR}:/config + - /data:/data + command: + - "--site-hub=ws://localhost:8001/ws" + - "--site-http=http://localhost:8001/call" + - "--instance-id=magpie-crate1slot2" + +Lyrebird +------------ + +Installation +`````````````` + +In order to build lyrebird, you first need a local build of spt3g-software. +Since we require direct access to the G3 C++ interface, unfortunately we're +not able to use the spt3g / so3g pypi package. You can follow the instructions +on `the SPT3G github page `_ to +build. + +Once that is successful, clone the simonsobs fork of +`lyrebird `_, +and follow the build instructions on the lyrebird readme. + +After lyrebird is built successfully, you'll want to add the following lines +to your bashrc: + +.. code-block:: bash + + export PATH=/path/to/lyrebird/build/lyrebird-bin:$PATH + export PATH=/path/to/lyrebird/bin:$PATH + +This will add two scripts to your path: + + - ``lyrebird``, the main lyrebird executable which takes in the path to a cfg + file specifying data vals, streamer ports, etc. and starts lyrebird from + that + - ``run_lyrebird`` which just takes the ports to listen to as + command line arguments, and will use that to generate a new + cfg file and start lyrebird from that. 
+ +Startup +`````````` + +To get lyrebird running, you must bring software up in the following order: + +1. Make sure smurf-streamers you wish to monitor are running, though data + doesn't have to actually be streaming +2. Bring up Magpie OCS agents. If streamers are not already running, this + will currently fail when it begins the ``read`` process. If this happens, + you can restart the process manually using an OCS client, or just restart + the agent, which will begin the process on startup. Make sure each magpie + instance you are running has a different ``stream-id`` and a different + ``dest`` port. +3. Run lyrebird. The ``lyrebird`` executable takes in a config file that + specifies data-vals and ports to monitor for G3Streams, but it is much + easier to use the ``run_lyrebird`` script in the ``lyrebird/bin`` directory. + This will generate a temporary config file determined by the arguments + passed in, and then start lyrebird with that config file. Right now + you need to pass in the ports that it should monitor. For instance, + if you have two magpie agents running with ``dest`` ports 8675 and 8676, + you can run:: + + run_lyrebird --port 8675 8676 + + and this will start lyrebird for the two corresponding slots. + + +Operation +`````````````` +This section will describe the main operation of the SO fork of lyrebird. +This isn't necessary for running magpie, but this isn't well documented in +the lyrebird repo so this may be useful for future development. + +On startup, lyrebird will attempt to connect to a G3NetworkSender streaming +on any ports specified in the lyrebird config file. The first frame read +from the port must be a config frame containing details on how lyrebird +should display the focal-plane. 
+ +A config file for a focal-plane with `nchans` channels will contain the +following fields: + + - ``x``: Array of len ``nchans`` with the x-coords of the visual element for + each channel + - ``y``: Array of len ``nchans`` with the y-coords of the visual element for + each channel + - ``cname``: Array of len ``nchans`` the base name for each channel. This is something + like ``/channel_10`` + - ``rotation``: Array of len ``nchans`` containing how many rads each vis elem should + be rotated (counter-clockwise) when drawn. + - ``templates``: Array of len ``nchans`` containing the det template (defined in the + - ``values``: Array of len ``n * nchans`` containing data values corresponding + to a given channel. These must be unique, and are of the form + ``/`` + - ``eqs``: Array of len ``m * nchans`` containing the equation descriptions for + each channel. See below for a full description, but these will be something + like ``/ + 1 s / 2``. + - ``eq_labels``: Array of len ``m * nchans`` containing the labels for each + individual equation. These are what will be displayed in the lyrebird menu + gui, and can be something like ``osc-tod`` or ``rms``. + - ``color_is_dynamic``: Array of len ``m * nchans`` containing bools that + determined if the eq values should be dynamically adjusted before passing + to the colormap. If this is False, the eq output will be sent directly to the + colormap. If True, the eq output will first be mapped to a value in the + range (0, 1) by interpolating between the min/max values in the lyrebird + data buffer. + - ``cmaps``: Array of len ``m * nchans`` containing the name of the colormap + to use to display each equation. + +Data Values +"""""""""""" +**data values** are named values that lyrebird buffers and tracks internally, +and can be used in equation evaluations. + +A number of data values can be stored per detector channel (as long as this +number is the same for all detector channels). 
So for instance if you want +to keep track of the detector TOD, rms, bias-group, and whether the channel +is flagged, you'll need to register the data values ``/tod``, +``/bg``, ``/rms``, ``/flagged`` for each detector. + +Equations +"""""""""" +Lyrebird can also store any number of **equations** for each detector channel. +This number can be different from the number of data values, but the number +of equations must be the same for each detector. +These equations are what are actually visualized for each channel in the +lyrebird GUI. + +The equations are registered as strings in the +`Polish Notation `_. This is +a method of describing equations where the operator comes before the operands, +and is a notation that is very easy to parse and evaluate. For example, the +operation :math:`a + b` will be ``+ a b`` in polish notation, and :math:`(a + +b) / 2` can be written as ``/ + a b 2``. Operands can either be numeric values +or registered data values, including both channel-specific data values such +as ``/tod`` and global data values that are registered in the lyrebird +config variable. + +Below is a full table of operators that can be interpreted by lyrebird: + +.. list-table:: Lyrebird Equation Operations + :widths: 25 25 25 25 + :header-rows: 0 + + * - ``+ x y`` + - :math:`x + y` + - ``| x y`` + - :math:`x \;\mathrm{or}\; y` + * - ``- x y`` + - :math:`x - y` + - ``= x y`` + - :math:`x == y` + * - ``* x y`` + - :math:`x * y` + - ``! 
x`` + - :math:`\mathrm{not}\; x` + * - ``/ x y`` + - :math:`x / y` + - ``& x y`` + - :math:`x \;\mathrm{and}\; y` + * - ``% x y`` + - :math:`x % y` + - ``c x`` + - :math:`\cos(x)` + * - ``a x`` + - :math:`\left|x\right|` + - ``t x`` + - :math:`\tan(x)` + * - ``^ x y`` + - :math:`x^y` + - ``T x`` + - :math:`\arctan(x)` + * - ``s x`` + - :math:`\sin(x)` + - ``q x`` + - :math:`\sqrt{x}` + + +For a more complex equation you might want to do something like display the +TOD value if a channel is not flagged, and if it is flagged send it to -1. +This example is done with the string:: + + + * ! flagged tod * -1 flagged + +which describes the equation: + +.. math:: + + \mathrm{tod} \times (! \mathrm{flagged}) + -1 \times \mathrm{flagged} + +The displayed equation can be selected by changing the **displayed_eq** idx in the +lyrebird GUI. + +Color Maps +"""""""""""" + +After equations are evaluated, the results will be passed to the specified +color map to determine what color the visual element should be drawn with. + +Below are the colormaps available in lyrebird. Generally these take in +arbitrary floats, but will clamp them into the range [0, 1]. Along with the +standard cmaps, there are additional *bolo_cmaps*, that map different regions +of the range [0, 1] to varying colors to for different purposes. If the value +is 0 or infinite, the vis-elem will be colored grey. The range (0, 0.3) will be +colored bright red, and then the range (.3, 1) will be a gradient from black +to a base color to white as pictured below.. + +.. image:: ../_static/images/lyrebird_cmaps.png + :width: 1200 + :alt: Lyrebird Colormaps + +There are additional colormaps ``white_cmap_fs`` and ``rainbow_cmap_fs`` that +are identical to their counterparts above but instead map onto the range [-1, 1]. +The colormap ``phase_cmap`` is identical to ``rainbow_cmap`` but maps the range +:math:`[0, \pi]`. 
+ + +If an equation's color is set to be dynamic, each equation value will be mapped +to the range [0, 1] by interpolating between the min and max value in the +equation's data buffer. This allows you to view changes equations that span a +large range with a high dynamic-range, but makes direct channel-to-channel +comparison impossible. + +Detector Layouts +----------------- + +Grid Layout +```````````` +The grid layout is a grid with 4096 elements (maximum number of channels a +single smurf slot can stream) + +.. image:: ../_static/images/lyrebird_grid.png + :width: 400 + :alt: Alternative text + +This layout contains 8 rows containing 512 detectors each, with the bottom +row being band 0, and the top row band 7. This is the easiest layout to set up +and is useful for viewing detector response as a function of their resonator +frequency or band / channel id. + +Wafer Layout +````````````` + +The Wafer layout takes in a det-map CSV file generated from the simonsobs +`detmap `_ package, and uses it +to generate a focal-plane layout. + +.. image:: ../_static/images/lyrebird_wafer.png + :width: 400 + :alt: Alternative text + +Agent API +------------------ + +.. autoclass:: agents.magpie.magpie_agent.MagpieAgent + :members: + +Supporting APIs +------------------ +.. autoclass:: agents.magpie.magpie_agent.FIRFilter + :members: + +.. autoclass:: agents.magpie.magpie_agent.RollingAvg + :members: + +.. autoclass:: agents.magpie.magpie_agent.FocalplaneConfig + :members: diff --git a/docs/agents/meinberg_m1000_agent.rst b/docs/agents/meinberg_m1000_agent.rst index 15ec54c56..1535698c0 100644 --- a/docs/agents/meinberg_m1000_agent.rst +++ b/docs/agents/meinberg_m1000_agent.rst @@ -60,7 +60,8 @@ using all of the available arguments:: 'instance-id': 'meinberg-m1000', 'arguments': [['--address', '10.10.10.101'], ['--port', 161], - ['--autostart', True]]}, + ['--auto-start', True], + ['--snmp-version', 3]]}, .. 
note:: The ``--address`` argument should be the address of the M1000 on the network. @@ -74,7 +75,7 @@ The Meinberg M1000 Agent should be configured to run in a Docker container. An example docker-compose service configuration is shown here:: ocs-m1000: - image: simonsobs/meinberg-m1000-agent + image: simonsobs/ocs-meinberg-m1000-agent hostname: ocs-docker network_mode: "host" volumes: diff --git a/docs/agents/pfeiffer_tc400.rst b/docs/agents/pfeiffer_tc400.rst new file mode 100644 index 000000000..41c5de759 --- /dev/null +++ b/docs/agents/pfeiffer_tc400.rst @@ -0,0 +1,108 @@ +.. highlight:: rst + +.. _pfeiffer_tc400_agent: + +============== +Pfeiffer TC400 +============== + +The Pfeiffer TC400 agent is an OCS Agent which controls the +Pfeiffer TC400 electronic drive unit, which controls the turbos used +for the bluefors DR. The communication is done over serial, and should be +integrated into OCS using a serial-to-ethernet converter. + +.. argparse:: + :filename: ../agents/pfeiffer_tc400/pfeiffer_tc400_agent.py + :func: make_parser + :prog: python3 pfeiffer_tc400_agent.py + +Description +----------- + +Serial Configuration +```````````````````` +:: + + baudrate=9600 + data bits=8 + stop bits=1 + parity=None + Flow control=RTS/CTS + FIFO=Enable + Interface=RS-485-2-Wire + +Configuration File Examples +--------------------------- +Below are configuration examples for the ocs config file and for running the +Agent in a docker container. + +OCS Site Config +``````````````` +To configure the Pfeiffer TC400 Agent we need to add a PfeifferTC400Agent +block to our ocs configuration file. The IP address and port +number are from the serial-to-ethernet converter. The turbo address is +visible on the power supply front panel. 
Here is an example configuration +block using all of the available arguments:: + + {'agent-class': 'PfeifferTC400Agent', + 'instance-id': 'pfeifferturboA', + 'arguments': [['--ip-address', '10.10.10.129'], + ['--port-number', '4002'], + ['--turbo-address', '1']]}, + + +Docker Compose +`````````````` +The agent should be configured to run in a Docker container. An +example docker-compose service configuration is shown here:: + + ocs-pfeiffer-turboA: + image: simonsobs/ocs-pfeiffer-tc400-agent + <<: *log-options + hostname: manny-docker + network_mode: "host" + volumes: + - ${OCS_CONFIG_DIR}:/config + command: + - "--instance-id=pfeifferturboA" + +Since the agent within the container needs to communicate with hardware on the +host network you must use ``network_mode: "host"`` in your compose file. + +Agent API +--------- + +.. autoclass:: agents.pfeiffer_tc400.pfeiffer_tc400_agent.PfeifferTC400Agent + :members: + +Example Clients +--------------- +The turbo agent can start and stop a turbo, acknowledge an error (this is +required to start again after an error occurs), and acquire turbo data. This +example client shows all of this functionality:: + + from ocs.ocs_client import OCSClient + client = OCSClient("pfeifferturboA") + + # Start data acq + status, message, session = client.acq.start() + print(session) + + # Stop data acq + client.acq.stop() + client.acq.wait() + + # Start the turbo + client.turn_turbo_on() + + # Stop the turbo + client.turn_turbo_off() + + # Acknowledge errors + client.acknowledge_turbo_errors() + +Driver API +---------- + +.. 
autoclass:: agents.pfeiffer_tc400.pfeiffer_tc400_driver.PfeifferTC400 + :members: diff --git a/docs/agents/pysmurf/pysmurf-controller.rst b/docs/agents/pysmurf-controller.rst similarity index 82% rename from docs/agents/pysmurf/pysmurf-controller.rst rename to docs/agents/pysmurf-controller.rst index ab938ee8c..aa9fcc1d2 100644 --- a/docs/agents/pysmurf/pysmurf-controller.rst +++ b/docs/agents/pysmurf-controller.rst @@ -57,6 +57,7 @@ named ``ocs-pysmurf-monitor`` might look something like:: OCS_CONFIG_DIR: /config EPICS_CA_ADDR_LIST: 127.255.255.255 EPICS_CA_MAX_ARRAY_BYTES: 80000000 + SLOT: 2 volumes: - ${OCS_CONFIG_DIR}:/config - /data:/data @@ -100,26 +101,24 @@ discarded instead of published. Description ------------ -The pysmurf controller runs pysmurf scripts by beginning an external process -that initializes a pysmurf object and calls its functions. -Generally SODETLIB and Pysmurf are mounted into the sodetlib docker as is seen -in the docker-compose entry above, allowing for fast iteration of control -scripts. - -The meat of this agent exists in the ``_run_script`` function, which starts the -script protocol and handles output. This will be called directly by the ``run`` -task, or any other task or process built into the agent. ``_run_script`` is -protected by a lock so that only one script can be run by the agent at a time. -The ``abort`` task can be used to interupt the actively running script and -release the lock. - -The ``run`` task takes in an arbitrary python script and list of command line -arguments, and is generic enough to run any pysmurf control routine we have a -script for. However as we start to finalize the sodetlib control scripts we -wish to use during operation, we will build these into their own tasks or -processes which take in parameters specific to that script. The ``tune_squids`` -task is an example of how we can do this, but it just runs a fake script for -the time being. 
+The Pysmurf Controller agent exposes many essential pysmurf and sodetlib +operations so that they can be called using OCS. Tasks and processes will +generate a new local pysmurf and det-config instance, and load in the tunefile +specified in the device cfg. The session object will be saved to the pysmurf +instance as ``S._ocs_session`` so that sodetlib functions can add logs and data +directly. + +Additionally, arbitrary SODETLIB scripts can be run as subprocesses using the +``run`` task. Data can still be added to the session by passing it using the +PysmurfPublisher to communicate with pysmurf-monitor agent (see the `Passing +Session Data`_ section for more info). + +In order for the Pysmurf instance to accurately represent the smurf state, +we must be careful about not using a second pysmurf instance to modify any +variables while a persistant instance exists. For that reason, the ``run`` +function and most tasks are protected by a single lock, preventing you +from running multiple tasks at a time. + Example Clients ---------------- @@ -146,6 +145,8 @@ would look like:: args=['--bands', '0', '1', '2'] controller.run.start(script=script_path, args=args) + + Passing Session Data `````````````````````` The Pysmurf Publisher can be used to pass information back from a detector operation diff --git a/docs/agents/pysmurf/pysmurf-monitor.rst b/docs/agents/pysmurf-monitor.rst similarity index 100% rename from docs/agents/pysmurf/pysmurf-monitor.rst rename to docs/agents/pysmurf-monitor.rst diff --git a/docs/agents/pysmurf/index.rst b/docs/agents/pysmurf/index.rst deleted file mode 100644 index d3468708d..000000000 --- a/docs/agents/pysmurf/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. highlight:: rst - -.. _pysmurf: - -==================== -Pysmurf/OCS Overview -==================== - -There are three agents used to interact with `pysmurf `__. -The Pysmurf Controller is used to run pysmurf scripts. 
-The Pysmurf Monitor listens to pysmurf messages published with the *pysmurf publisher*. -The Pysmurf Archiver copies files pysmurf writes onto a storage node. -There should be one controller per pysmurf instance (per smurf card), -but one pysmurf-monitor and archiver can handle multiple instances. - -These pages will show you how to setup these three agents. - -.. toctree:: - :caption: Pysmurf agents: - :maxdepth: 3 - - pysmurf-controller - pysmurf-monitor - pysmurf-archiver diff --git a/docs/agents/pysmurf/pysmurf-archiver.rst b/docs/agents/pysmurf/pysmurf-archiver.rst deleted file mode 100644 index 12c8bee0d..000000000 --- a/docs/agents/pysmurf/pysmurf-archiver.rst +++ /dev/null @@ -1,107 +0,0 @@ -.. highlight:: rst - -.. _pysmurf_archiver: - -==================== -Pysmurf Archiver -==================== - -The Pysmurf Archiver agent actively monitors the ``pysmurf_files`` database, -and copies newly registered files to their new archived path on the storage -node. - -.. argparse:: - :filename: ../agents/pysmurf_archiver/pysmurf_archiver_agent.py - :func: make_parser - :prog: python3 pysmurf_archiver_agent.py - -Dependencies ------------- - -In addition to the dependencies listed in the Pysmurf Archiver's -``requirements.txt`` file, the Archiver needs ssh-keys configured to be able to -rsync files off of the smurf server. 
- -Setting up SSH Permissions -`````````````````````````` -For instructions on how to setup ssh-permissions for the pysmurf-archiver, -see the following SO-wiki page: http://simonsobservatory.wikidot.com/daq:smurf-ssh-permissions - -Configuration File Examples ------------------------------ - -OCS Site Config -```````````````` - -Example site-config entry:: - - {'agent-class': 'PysmurfArchiverAgent', - 'instance-id': 'pysmurf-archiver', - 'arguments': [['--data-dir', '/data/pysmurf/'], - ['--host', 'data@smurf-srv'], - ['--mode', 'run'], - ['--target', 'pysmurf-monitor']]}, - -Docker Compose -```````````````` -The docker-compose entry is similar to that of the pysmurf-monitor. For example:: - - ocs-pysmurf-archiver: - image: simonsobs/ocs-pysmurf-archiver-agent:${SOCS_TAG} - user: "9000" # ocs user id - container_name: ocs-pysmurf-archiver - environment: - MYSQL_HOST: ${DB_HOST} - MYSQL_DATABASE: ${DB} - MYSQL_USER: ${DB_USER} - MYSQL_PASSWORD: ${DB_PW} - volumes: - - ${OCS_CONFIG_DIR}:/config - - /home/ocs:/home/ocs - - /data:/data - -Description ------------ - -The archiver's role is to copy files from from the smurf-server over to a -storage node. It does this by continuously monitoring the pysmurf_files -database, and finding any uncopied files writen by the target specified in the -site args. - -It then rsync's the files over to a new location on the storage node, -setting ``copied=1`` if successful, and incrementing the failure counter if -unsuccessful. - -There is a single **run** process that continuously checks the database -for new files. This is automatically started when the agent boots and shouldn't -stop unless something breaks. - - -Archived Path -`````````````` - -The archiver uses the ``action`` and ``action_timestamp`` fields so that -plots and outputs that are created during a single user action are archived -together. 
Action names for pysmurf and sodetlib functions are generally the -top-level function name that the user runs, but actions can also be set at -runtime with the keyword argument ``pub_action``. - -The archived path is determined by:: - - /<5 ctime digits>//_/ - -Where ```` is the ocs-site argument for the archiver, the 5 ctime -digits corresponds with ~ 1 day of data, and ```` is the pysmurf -publisher id. -For instance, if a user runs ``S.tracking_setup`` at ``ctime`` 1589517264, -on crate=1, slot=2, the output might be stored in the directory:: - - /15895/crate1_slot2/1589517264_tracking_setup/outputs - -.. autofunction:: agents.pysmurf_archiver.pysmurf_archiver_agent.create_local_path - - -Agent API ----------- -.. autoclass:: agents.pysmurf_archiver.pysmurf_archiver_agent.PysmurfArchiverAgent - :members: diff --git a/docs/agents/smurf_file_emulator.rst b/docs/agents/smurf_file_emulator.rst index 3854f37a8..2b5455fc1 100644 --- a/docs/agents/smurf_file_emulator.rst +++ b/docs/agents/smurf_file_emulator.rst @@ -34,7 +34,10 @@ Here is an example of the file-emulator site-configuration block:: 'arguments': [[ '--stream-id', 'emulator', '--base-dir', '/path/to/fake/data/directory', - '--file-duration', 60 + '--file-duration', 60, + '--nchans', 1024, + '--sample-rate', 200, + '--frame-len', 2 ]]} diff --git a/docs/agents/smurf_recorder.rst b/docs/agents/smurf_recorder.rst deleted file mode 100644 index 7d36578d9..000000000 --- a/docs/agents/smurf_recorder.rst +++ /dev/null @@ -1,125 +0,0 @@ -.. highlight:: rst - -.. _smurf_recorder: - -==================== -SMuRF Recorder Agent -==================== - -The SMuRF Recorder Agent is an OCS Agent which listens for G3 Frames -incoming from a G3NetworkSender connection using a G3Reader and writes them to -disk. This is used in conjunction with the SMuRF Streamer to record data from -the SMuRF systems. - -.. 
argparse:: - :filename: ../agents/smurf_recorder/smurf_recorder.py - :func: make_parser - :prog: python3 smurf_recorder.py - -Description ------------ -The SMuRF Recorder Agent automatically starts listening for a -G3NetworkSender which will send it frames. This will typically be the SMuRF -Streamer. There are several steps performed in a loop, the first of which is an -attempt to read frames from an established connection. A connection attempt is -made if it is not already setup. Meaning it is safe to start the Agent before -a connection is available. - -Once a connection is made it will read frames that are sent by the streamer. -Frames should be sent approximately every second. Flow control frames -indicating the connection should stay alive should be sent and are discarded -immediately upon receipt before proceeding to the next step. If the reader -times out then the file will be closed and the reader destroyed, meaning the -connection will need to be reestablished in the next iteration of the loop. -This should be fine, as long as we do not enter a state where many connections -are made in succession. - -G3Files will be written to the path:: - - /<5 ctime digits>//filename - -The recorder will attempt to read the stream-id from the G3Frames, however -if no stream-id is present, the stream-id passed in the site arguments will -be used. - -The recorder will write the frames to file with file names and location based -on the timestamp when the acquisition was started (i.e. the first frame was -written.) Files will be at most "time-per-file" long, which is configurable but -defaults to 10 minutes (the same duration planned for when we are on the -telescope.) Acquisitions that are longer than that will have the same start to -their filenames based on the timestamp when acquisition started, but will -increment a zero padded suffix so one will end up with files like -`2019-01-01-12-00-00_000.g3`, `2019-01-01-12-00-00_001.g3`, etc. 
for -acquisitions started at 12:00:00 UTC on Jan 1st, 2019. - -The recorder handles flow control frames to indicate the start and end of -each acquisition. If a start frame is seen, the currently open file (if there -is one) is closed, and a new file created. If an end frame is seen, the current -file is closed. If frames are seen without a beginning start frame, then they -will be recorded as if a start frame was sent. If a file was started, but no -new frames are acquired for 10 seconds, then the file is closed. A new file -will be created when frames start to come in again. This is an attempt to match -output of other files from the SMuRF, which may be grouped by observation. - -Directory location for a group of files will not rotate through a date change, -i.e. if you cross the threshold in ctime between say 15684 and 15685, but are -still on the same acquisition, all files will end up in 15684 to keep them -grouped together. - -Configuration File Examples ---------------------------- -Below are configuration examples for the ocs config file and for running the -Agent in a docker container. - -ocs-config -`````````` -To configure the SMuRF Recorder we need to add a SmurfRecorder -block to our ocs configuration file. Here is an example configuration block -using all of the available arguments:: - - {'agent-class': 'SmurfRecorder', - 'instance-id': 'smurf-recorder', - 'arguments': [['--auto-start', True], - ['--time-per-file', '600'], - ['--data-dir', '/data/'], - ['--port', '50000'], - ['--address', 'smurf-stream-sim'], - ['--stream-id', 'crate1slot2']]}, - -A few things to keep in mind. The ``--data-dir`` is the directory within the -container, the default is probably fine, but can be changed if needed, you'll -just need to mount your directory appropriately when starting the container. 
-The ``--address`` can be resolved by name if your containers are running within -the same Docker environment, however if your setup differs this likely will be -an IP address, with the container running with the "host" network-mode. - -Docker -`````` -The SMuRF Recorder should be configured to run in a Docker container. An -example docker-compose service configuration is shown here:: - - ocs-smurf-recorder: - image: simonsobs/ocs-smurf-recorder - hostname: ocs-docker - volumes: - - ${OCS_CONFIG_DIR}:/config:ro - - ./data:/data - environment: - LOGLEVEL: "info" - -The ``./data`` directory path should point to the location on your host machine -that you want to write the data to. The ``LOGLEVEL`` environment variable can -be used to set the log level for debugging. The default level is "info". - -Agent API ---------- - -.. autoclass:: agents.smurf_recorder.smurf_recorder.SmurfRecorder - :members: start_record - -Developer Info --------------- -If you're editing the SMuRF Recorder code you might find this info useful. - -.. autoclass:: agents.smurf_recorder.smurf_recorder.FrameRecorder - :members: diff --git a/docs/agents/suprsync.rst b/docs/agents/suprsync.rst index 2adc8a498..5053336ec 100644 --- a/docs/agents/suprsync.rst +++ b/docs/agents/suprsync.rst @@ -181,17 +181,18 @@ Agent API Supporting APIs --------------- - .. _SupRsyncFiles: SupRsyncFiles Table -```````````````````````````````` +``````````````````` + .. autoclass:: socs.db.suprsync.SupRsyncFile - :members: + :members: .. _SupRsyncFilesManager: SupRsyncFiles Manager -`````````````````````````````` +````````````````````` + .. autoclass:: socs.db.suprsync.SupRsyncFilesManager - :members: + :members: diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 000000000..92fa4a9ea --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,107 @@ +API +=== + +This page contains the auto-generated documentation for the socs package. 
+ +socs.agent +---------- + +The ``agent/`` directory contains code that supports one or more socs Agents. + +socs.agent.moxaSerial +````````````````````` + +.. automodule:: socs.agent.moxaSerial + :members: + :undoc-members: + :show-inheritance: + +socs.agent.pmx +`````````````` + +.. automodule:: socs.agent.pmx + :members: + :undoc-members: + :show-inheritance: + +socs.agent.prologix_interface +````````````````````````````` + +.. automodule:: socs.agent.prologix_interface + :members: + :undoc-members: + :show-inheritance: + +socs.agent.scpi_psu_driver +`````````````````````````` + +.. automodule:: socs.agent.scpi_psu_driver + :members: + :undoc-members: + :show-inheritance: + +socs.agent.tektronix3021c_driver +```````````````````````````````` + +.. automodule:: socs.agent.tektronix3021c_driver + :members: + :undoc-members: + :show-inheritance: + +socs.agent.vantage_pro2 +``````````````````````` + +.. automodule:: socs.agent.vantage_pro2.vantage_pro2 + :members: + :undoc-members: + :show-inheritance: + +socs.db +------- + +The ``db/`` directory contains code that supports databases used within socs. + +socs.db.suprsync +```````````````` + +.. automodule:: socs.db.suprsync + :members: + :undoc-members: + :show-inheritance: + :noindex: + +socs.Lakeshore +-------------- + +The ``Lakeshore/`` directory contains code that supports the various Lakeshore +devices used in socs. For details, see the "Supporting APIs" section of the +relevant Agent page. + +socs.snmp +--------- + +.. automodule:: socs.snmp + :members: + :undoc-members: + :show-inheritance: + +socs.testing +------------ + +The ``testing/`` directory contains code that supports testing Agents. + +socs.testing.device_emulator +```````````````````````````` + +.. automodule:: socs.testing.device_emulator + :members: + :undoc-members: + :show-inheritance: + +socs.util +--------- + +.. 
automodule:: socs.util + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py index 173f26d37..9c914f770 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -80,6 +80,7 @@ autodoc_mock_imports = ['spt3g', 'so3g', + 'so3g.proj', 'labjack', 'labjack.ljm', 'labjack.ljm.ljm', @@ -93,6 +94,12 @@ 'soaculib', 'scan_helpers', 'soaculib.twisted_backend', + 'pfeiffer_vacuum_protocol', + 'pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol', + 'pfeiffer_tc400_driver', + 'sodetlib', + 'sodetlib.operations', + 'sodetlib.det_config', ] from unittest import mock for m in autodoc_mock_imports: diff --git a/docs/developer/snmp.rst b/docs/developer/snmp.rst index 88d2edd86..b463f8dd8 100644 --- a/docs/developer/snmp.rst +++ b/docs/developer/snmp.rst @@ -97,3 +97,4 @@ interface is available for use and detailed here: .. autoclass:: socs.snmp.SNMPTwister :members: + :noindex: diff --git a/docs/developer/testing.rst b/docs/developer/testing.rst index 0449bb665..e0ea597b2 100644 --- a/docs/developer/testing.rst +++ b/docs/developer/testing.rst @@ -56,3 +56,4 @@ API --- .. automodule:: socs.testing.device_emulator :members: + :noindex: diff --git a/docs/index.rst b/docs/index.rst index adec9a08a..ca2cd45d3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,12 +14,14 @@ Contents Section Description =================== ============================================================ User Guide Start here for information about the design and use of SOCS. -Agent Reference Agents are the OCS components that interface with other - systems. These run at all times, awaiting commands from - OCS Clients. These are the Agents specifc to SO hardware. +Agent Reference Details on configuration and use of the OCS Agents + provided by SOCS. Simulator Reference Simulators are used to mock software and hardware interfaces when access to actual hardware is unavailable. These are useful for testing other code. 
+Developer Guide Information relevant to developers who are contributing to + SOCS. +API Reference Full API documentation for core parts of the SOCS library. =================== ============================================================ .. toctree:: @@ -28,9 +30,8 @@ Simulator Reference Simulators are used to mock software and hardware user/intro user/installation - user/network - user/new_agents user/webserver + user/sequencer .. toctree:: :caption: Agent Reference @@ -41,6 +42,7 @@ Simulator Reference Simulators are used to mock software and hardware agents/chwp_encoder agents/cryomech_cpa agents/fts_agent + agents/hwp_rotation_agent agents/labjack agents/lakeshore240 agents/lakeshore336 @@ -48,17 +50,18 @@ Simulator Reference Simulators are used to mock software and hardware agents/lakeshore372 agents/lakeshore425 agents/latrt_xy_stage + agents/magpie agents/meinberg_m1000_agent agents/pfeiffer - agents/pysmurf/index + agents/pfeiffer_tc400 + agents/pysmurf-controller + agents/pysmurf-monitor agents/scpi_psu agents/smurf_crate_monitor - agents/smurf_recorder agents/suprsync agents/synacc agents/tektronix3021c agents/vantage_pro2 - agents/smurf_file_emulator .. toctree:: :caption: Simulator Reference @@ -66,6 +69,7 @@ Simulator Reference Simulators are used to mock software and hardware simulators/ls240_simulator simulators/ls372_simulator + agents/smurf_file_emulator simulators/smurf_stream_simulator .. toctree:: @@ -75,6 +79,12 @@ Simulator Reference Simulators are used to mock software and hardware developer/snmp developer/testing +.. 
toctree:: + :caption: API Reference + :maxdepth: 2 + + api + Indices and tables ================== diff --git a/docs/user/installation.rst b/docs/user/installation.rst index d81909dbe..a50a7c05d 100644 --- a/docs/user/installation.rst +++ b/docs/user/installation.rst @@ -3,11 +3,23 @@ Installation ============ -To checkout SOCS, clone the repository and run `setup.py`:: +Install and update with pip:: - git clone https://github.com/simonsobs/socs.git - cd socs/ - pip3 install -r requirements.txt . + $ pip install -U socs + +Optionally install so3g during installation:: + + $ pip install -U socs[so3g] + +Installing from Source +---------------------- + +To install from source, clone the repository and install with pip:: + + git clone https://github.com/simonsobs/socs.git + cd socs/ + pip3 install -r requirements.txt + pip3 install . .. note:: If you are expecting to develop socs code you should consider using @@ -17,7 +29,7 @@ To checkout SOCS, clone the repository and run `setup.py`:: If you would like to install for just the local user, throw the `--user` flag when running `setup.py`. -The Agent scripts live in the ``agents/`` sub-directory of the +The SOCS Agents are kept in the ``agents/`` sub-directory of the repository. Take note of the full path to the ``agents`` directory, as you will need to add it to the OCS site configuration file. @@ -28,21 +40,19 @@ installation script to assist with this, but ``cp -r`` will work. To make the Agent scripts usable within OCS, you must: -- Ensure that the base ``ocs`` package is installed on the local - system. -- Ensure that any additional package dependencies for Agent scripts +- Ensure that any package dependencies for Agent scripts are satisfied. Note that you only need to install the dependencies for the specific Agents that you intend to run on the local system. - Edit the local system's site config file to include the full path to - the SOCS agents directory (see ``Configuration``). + the SOCS agents directory. 
Site Config File ---------------- -A configured OCS host will contain a ``site config`` file, which -describes how to connect to the crossbar server and (if HostMaster is -in use) describes what Agent instances will run on the system. +A configured OCS host will contain a site config file (SCF), which describes +how to connect to the crossbar server as well as which Agents will run on the +system. To tell OCS about the SOCS agents, update the ``agent-paths`` setting in the host configuration block for your host. The ``agent-paths`` @@ -70,8 +80,8 @@ that ... is a place-holder for other configuration text. # List of paths to Agent plugin modules. 'agent-paths': [ - '/usr/shared/lib/ocs_agents', - '/usr/shared/lib/socs/agents', + '/usr/shared/lib/ocs_agents', + '/usr/shared/lib/socs/agents', ], ... diff --git a/docs/user/intro.rst b/docs/user/intro.rst index 608eac4cd..bac36b082 100644 --- a/docs/user/intro.rst +++ b/docs/user/intro.rst @@ -1,19 +1,23 @@ Introduction ============ -The Simons Observatory Control System (`SOCS`_) codebase contains -device control code for use with the base Observatory Control System -(`OCS`_). +The Simons Observatory Control System (`SOCS`_) library contains +Observatory Control System (`OCS`_) Agents for hardware being used by the +Simons Observatory. Users of SOCS should first confirm that OCS is set up and running -correctly. Then device codes from SOCS can be integrated into the -system. +correctly. Then SOCS Agents can be integrated into the system. -General usage of OCS is covered in the `documentation for OCS`_. In -the present documentation, we first focus on how to obtain the SOCS -code and enable SOCS Agent scripts for use with OCS. Then we provide -descriptions of each hardware module. +General usage of OCS is covered in the `documentation for OCS`_, including +`network configuration`_ and `creating new agents`_. + +In this documentation, we first describe how to obtain the SOCS code and enable +SOCS Agents for use with OCS. 
Then we provide descriptions of each Agent and +hardware simulator. We conclude with a brief developer guide for SOCS library +code. .. _`SOCS`: https://github.com/simonsobs/socs .. _`OCS`: https://github.com/simonsobs/ocs .. _`documentation for OCS`: https://ocs.readthedocs.io/ +.. _network configuration: https://ocs.readthedocs.io/en/latest/user/network.html +.. _creating new agents: https://ocs.readthedocs.io/en/latest/developer/agents.html diff --git a/docs/user/network.rst b/docs/user/network.rst deleted file mode 100644 index 52cd452e8..000000000 --- a/docs/user/network.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _network: - -===================== -Network Configuration -===================== - -Network configuration documentation has moved to the main ocs repository. See -`Network Configuration`_. - -.. _Network Configuration: https://ocs.readthedocs.io/en/latest/user/network.html diff --git a/docs/user/new_agents.rst b/docs/user/new_agents.rst deleted file mode 100644 index 0f0052b7f..000000000 --- a/docs/user/new_agents.rst +++ /dev/null @@ -1,7 +0,0 @@ -Creating New Agents =================== - -The guide for writing new OCS Agents has moved to the main OCS repository. See -`Creating New Agents`_. - -.. _Creating New Agents: https://ocs.readthedocs.io/en/latest/developer/agents.html diff --git a/docs/user/sequencer.rst b/docs/user/sequencer.rst new file mode 100644 index 000000000..a9ff80b88 --- /dev/null +++ b/docs/user/sequencer.rst @@ -0,0 +1,151 @@ +.. highlight:: bash + +.. _sequencer: + +========= +Sequencer +========= + +Overview +======== + +The 'Sequencer' is a tool with a graphical front end that can be used to run +OCS Clients. Often this will be done with the help of another library, +enabling the command of many Clients in sequence. This page describes the +configuration and use of the Sequencer. + +Description +=========== + +The Sequencer tool is called `nextline +`_. 
Nextline is composed of a `backend +`_ API, which uses GraphQL, and a +graphical `frontend `_ written in +VueJS. The graphical front end is accessible in your web browser and provides +an interface for running Python code. The code is passed to and executed on the +backend. + +Typically these components will run in two separate Docker containers. In +practice you will have these both behind a reverse proxy. This proxy server may +be in another container. For the purposes of this page we will use an nginx +proxy in a separate container (similar to the one described in +:ref:`webserver`.) + +In order to orchestrate many OCS Clients, we use a helper library called +`sorunlib `_. For setup of nextline with +sorunlib we provide a pre-build Docker image called the `so-daq-sequencer +`_. + +Setup +===== +Here we setup an example set of Docker containers which make nextline available +on your localhost. Here is an example ``docker-compose.yml`` file that provides +both nextline components, an nginx reverse proxy, and persistent storage for +the backend database: + +.. code-block:: yaml + + version: '3.7' + + networks: + default: + external: + name: ocs-net + + services: + frontend: + image: ghcr.io/simonsobs/nextline-web:latest + container_name: nextline-web + environment: + - API_HTTP=http://localhost/nextline/api/ + - PUBLIC_PATH=/nextline/ + + backend: + image: ghcr.io/simonsobs/so-daq-sequencer:latest + container_name: nextline-backend + environment: + - NEXTLINE_DB__URL=sqlite:////db/db.sqlite3 + - OCS_CONFIG_DIR=/config + volumes: + - /srv/sequencer/db:/db + - ${OCS_CONFIG_DIR}:/config:ro + + nginx: + image: nginx:latest + restart: always + volumes: + - ./nginx.conf:/etc/nginx/nginx.conf:ro + ports: + - "127.0.0.1:80:80" + +.. note:: + This example makes use of the externally defined network "ocs-net". This is + to allow the backend to connect to the crossbar server. Your needs may differ + depending on your network configuration. 
For details on an "ocs-net" like + configuration see the `OCS Docs + `_. + +.. note:: + Your site-config-file must point to the crossbar address as if from the + container running the so-daq-sequencer by default, otherwise the sequencer + will be unable to connect. We are working on a solution/recommended + configuration upstream in OCS. Until that is solved be aware that this + may limit local commanding via other methods like running control programs + on the commandline. + +:ref:`webserver` has a full example of ``nginx.conf``. What we need to add to +that config is: + +.. code-block:: nginx + + http { + # Nextline Websocket Connection + # For websocket connection upgrade + # https://www.nginx.com/blog/websocket-nginx/ + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + server { + ... + + location /nextline/ { + proxy_pass http://nextline-web/nextline/; + + #auth_basic "Restricted Content"; + #auth_basic_user_file /etc/nginx/.htpasswd; + } + + location /nextline/api/ { + proxy_pass http://nextline-backend:8000/; + + # https://www.nginx.com/blog/websocket-nginx/ + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + + #auth_basic "Restricted Content"; + #auth_basic_user_file /etc/nginx/.htpasswd; + } + } + } + +.. warning:: + You absolutely must put the Sequencer behind some form of authentication. + The entire point of the tool is remote code execution, making it dangerous + if exposed to the open internet. + +Once you bring these containers up (with ``docker-compose up -d``), you should +be able to access the Sequencer by pointing your web browser to +http://localhost/nextline/. + +.. note:: + This configuration will get you a standalone local nextline behind a proxy. + To run nextline on a public facing URL you need to set ``API_HTTP`` + appropriately. For example: + + .. 
code-block:: yaml + + - API_HTTP=https://example.com/nextline/api/ diff --git a/requirements.txt b/requirements.txt index b8c9d7db7..912909aaf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ ocs autobahn[serialization] twisted -mysql-connector>=2.1.6 # Lakeshore240 pyserial diff --git a/requirements/testing.txt b/requirements/testing.txt index fec721ff7..365471893 100644 --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -3,3 +3,6 @@ pytest-cov pytest-docker-compose pytest-dependency pytest-order +so3g + +-r ../agents/pfeiffer_tc400/requirements.txt diff --git a/setup.py b/setup.py index bf945764d..5abbc1714 100644 --- a/setup.py +++ b/setup.py @@ -35,4 +35,7 @@ 'sqlalchemy', 'pysnmp', ], + extras_require={ + "so3g": ["so3g"], + }, ) diff --git a/socs/agent/moxaSerial.py b/socs/agent/moxaSerial.py new file mode 100644 index 000000000..76413beec --- /dev/null +++ b/socs/agent/moxaSerial.py @@ -0,0 +1,234 @@ +# 4/2009 BAS +# read() replicates behavior of pyserial +# readexactly() added, which is probably more useful +# readbuf() dumps current buffer contents +# readpacket() has old broken behavior of read() - lowest level / fastest + +import time +import socket + +MOXA_DEFAULT_TIMEOUT = 1.0 + +# Socket modes: +# Nonblocking +# s.setblocking(0) means s.settimeout(0) +# Read returns as much data as possible, does not fail +# Doesn't work for me on windows. Don't use. +# Timeout +# s.settimeout(n) +# Waits until buffer has enough data, then returns +# Throws exception (caught by read) if not enough data is ready after n second. +# Read returns '' on fail +# Blocking +# s.setblocking(1) or s.settimeout(None) +# Waits forever until buffer has enough data, then returns +# This is the default mode for sockets +# Check socket.getdefaulttimeout() to see what mode sockets are created in + +# pyserial style wrapper over IA 5250 TCP Server mode + + +class Serial_TCPServer(object): + """Class to speak with the moxa serial / Ethernet converter. 
+ Set up the moxa box ports according to the specifications of the device + hooked into each serial port. + + A typical sequence of messages for dealing with a device. Create the + socket once:: + + >>> moxa = moxa_serial.Serial_TCPServer(('IP',port),timeout=1.0) + + Then do this sequence, complicated in some way by an individual device's hand + shaking needs:: + + >>> moxa.flushInput() + >>> moxa.write(msg) + >>> moxa.readexactly(n) + + I write a "cmd" methods that handle the proper sequence with checksums etc. + Most devices require a certain delay between commands, which is left to the + user. If using multithreading, wrap your delays in mutexes. + + Args: + port (tuple): (IP addr, TCP port) + timeout (float): Timeout for reading from the moxa box + + """ + + def __init__(self, port, timeout=MOXA_DEFAULT_TIMEOUT): + self.port = port + + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.setblocking(0) + self.settimeout(timeout) + self.sock.connect(self.port) + + def readexactly(self, n): + """Tries to read exactly n bytes within the timeout. + + Args: + n: Number of bytes to read. + + Returns: + str: Returned message if n bytes were read. Empty string if + ``len(message) != n``. + + """ + t0 = time.time() + msg = "" + timeout = self.gettimeout() + while len(msg) < n: + newtimeout = timeout-(time.time()-t0) + if newtimeout <= 0.0: + break + self.settimeout(newtimeout) + try: + msg = self.sock.recv(n, socket.MSG_PEEK) + except: + pass + # Flush the message out if you got everything + if len(msg) == n: + msg = self.sock.recv(n).decode() + # Otherwise tell nothing and leave the data in the buffer + else: + msg = '' + self.settimeout(timeout) + return msg + + def readbuf_slow(self, n): + """Reads whatever is in the buffer right now, but is O(N) in buffer + size. + + Args: + n: Number of bytes to read. 
+ + """ + msg = '' + self.sock.setblocking(0) + try: + for i in range(n): + msg += self.sock.recv(1) + except: + pass + self.sock.setblocking(1) # belt and suspenders + self.settimeout(self.__timeout) + return msg + + def readbuf(self, n): + """Returns whatever is currently in the buffer. Suitable for large + buffers. + + Args: + n: Number of bytes to read. + + """ + if n == 0: + return '' + try: + msg = self.sock.recv(n) + except: + msg = '' + n2 = min(n-len(msg), n/2) + return msg + self.readbuf(n2) + + def readpacket(self, n): + """Like ``read()``, but may not return everything if the moxa box + flushes too soon. + + Will probably read whatever arrives in the buffer, up to n or the + timeout. Use ``read()`` for certainty. + + """ + try: + msg = self.sock.recv(n) + except: + msg = '' + return msg + + def read(self, n): + """Like ``readexactly()``, but returns whatever is in the buffer if it + can't fill up. + + This replicates the behavior of the read method in pyserial. I feel + that ``readexactly()`` has better behavior for most applications + though. + + Args: + n: Number of bytes to read. Will read at most n bytes. + + Returns: + str: Returned message of up to n bytes. + + """ + msg = self.readexactly(n) + n2 = n-len(msg) + if n2 > 0: + msg += self.readbuf(n2) + return msg + + def readline(self, term='\n'): + msg = '' + while True: + c = self.readexactly(1) + if c == term or c == '': + return msg + msg += c + + def readall(self): + msg = "" + while 1: + c = self.readexactly(1) + if c == '\r': + return msg + if c == '': + return False + msg += c + return msg + + def write(self, msg): + """Sends message to the moxa box. + + Args: + msg (str): Message to send, including terminator (i.e. ``\\r\\n``) if + needed. + + """ + self.sock.send(msg.encode()) + + def writeread(self, msg): + self.flushInput() + self.write(msg) + return self.readall() + + def flushInput(self): + """Erases the input buffer at this moment. 
+ + Before I ask for new info from a device, I flush my + receive buffer to make sure I don't get any garbage in + front. + + """ + self.sock.setblocking(0) + try: + while len(self.sock.recv(1)) > 0: + pass + except: + pass + self.sock.setblocking(1) + self.sock.settimeout(self.__timeout) + + def settimeout(self, timeout): + """Sets the socket in timeout mode.""" + assert timeout > 0.0 + self.__timeout = timeout + self.sock.settimeout(timeout) + # We don't query the socket's timeout or check that they're still + # correct. Since self.sock is public this could be the wrong + # timeout! + + def gettimeout(self): + return self.__timeout + + timeout = property(gettimeout, settimeout, + doc='Communication timeout. Only use timeout mode ' + + 'with ``timeout > 0.0``.') diff --git a/socs/agent/pmx.py b/socs/agent/pmx.py new file mode 100644 index 000000000..c250ac38c --- /dev/null +++ b/socs/agent/pmx.py @@ -0,0 +1,431 @@ +import time +import serial +import sys +import os + +from socs.agent import moxaSerial as mx + +class PMX: + """The PMX object is for communicating with the Kikusui PMX power supplies. 
+ + Args: + rtu_port (str): Serial RTU port + tcp_ip (str): TCP IP address + tcp_port (int): TCP port + timeout (int): Connection timeout + """ + + def __init__(self, rtu_port=None, tcp_ip=None, tcp_port=None, timeout=None): + # Connect to device + self.using_tcp = None + self._rtu_port = None + self.ser = None + msg = self.__conn(rtu_port, tcp_ip, tcp_port, timeout) + print(msg) + self._remote_Mode() + + # Timing variables + self._tstep = 0.1 # sec + + def __del__(self): + if not self.using_tcp: + print( + "Disconnecting from RTU port %s" + % (self._rtu_port)) + if self.ser: + self.ser.close() + else: + print( + "Disconnecting from TCP IP %s at port %d" + % (self._tcp_ip, self._tcp_port)) + return + + def check_voltage(self): + """Check the voltage.""" + self.clean_serial() + bts = self.ser.write("MEAS:VOLT?\n\r") + self.wait() + val = float(self.ser.readline()) + msg = "Measured voltage = %.3f V" % (val) + # print(msg) + return msg, val + + def check_current(self): + """Check the current.""" + self.clean_serial() + self.ser.write("MEAS:CURR?\n\r") + self.wait() + val = float(self.ser.readline()) + msg = "Measured current = %.3f A" % (val) + # print(msg) + return msg, val + + def check_voltage_current(self): + """Check both the voltage and current.""" + self.clean_serial() + voltage = self.check_voltage()[1] + current = self.check_current()[1] + msg = ( + "Measured voltage = %.3f V\n" + "Measured current = %.3f A\n" + % (voltage, current)) + # print(msg) + return voltage, current + + def check_output(self): + """Return the output status.""" + self.clean_serial() + self.ser.write("OUTP?\n\r") + self.wait() + val = int(self.ser.readline()) + if val == 0: + msg = "Measured output state = OFF" + elif val == 1: + msg = "Measured output state = ON" + else: + msg = "Failed to measure output..." 
+ print(msg) + return msg, val + + def set_voltage(self, val, silent=False): + """Set the PMX voltage.""" + self.clean_serial() + self.ser.write("VOLT %f\n\r" % (float(val))) + self.wait() + self.ser.write("VOLT?\n\r") + self.wait() + val = self.ser.readline() + msg = "Voltage set = %.3f V" % (float(val)) + if (silent != True): + print(msg) + + return msg + + def set_current(self, val, silent=False): + """Set the PMX on.""" + self.clean_serial() + self.ser.write("CURR %f\n\r" % (float(val))) + self.wait() + self.ser.write("CURR?\n\r") + self.wait() + val = self.ser.readline() + msg = "Current set = %.3f A\n" % (float(val)) + if (silent != True): + print(msg) + + return msg + + def use_external_voltage(self): + """Set PMX to use external voltage.""" + self.clean_serial() + self.ser.write("VOLT:EXT:SOUR VOLT\n\r") + self.wait() + self.ser.write("VOLT:EXT:SOUR?\n\r") + self.wait() + val = self.ser.readline() + msg = "External source = %s" % (val) + print(msg) + + return msg + + def ign_external_voltage(self): + """Set PMX to ignore external voltage.""" + self.clean_serial() + self.ser.write("VOLT:EXT:SOUR NONE\n\r") + self.wait() + self.ser.write("VOLT:EXT:SOUR?\n\r") + self.wait() + val = self.ser.readline() + msg = "External source = %s" % (val) + print(msg) + + return msg + + def set_voltage_limit(self, val, silent=False): + """Set the PMX voltage limit.""" + self.clean_serial() + self.ser.write("VOLT:PROT %f\n\r" % (float(val))) + self.wait() + self.ser.write("VOLT:PROT?\n\r") + self.wait() + val = self.ser.readline() + msg = "Voltage limit set = %.3f V" % (float(val)) + if (silent != True): + print(msg) + + return msg + + def set_current_limit(self, val, silent=False): + """Set the PMX current limit.""" + self.clean_serial() + self.ser.write("CURR:PROT %f\n\r" % (float(val))) + self.wait() + self.ser.write("CURR:PROT?\n\r") + self.wait() + val = self.ser.readline() + msg = "Current limit set = %.3f A\n" % (float(val)) + if (silent != True): + print(msg) + + 
return msg + + def turn_on(self): + """Turn the PMX on.""" + self.clean_serial() + self.ser.write("OUTP ON\n\r") + self.wait() + self.ser.write("OUTP?\n\r") + self.wait() + val = self.ser.readline() + msg = "Output state = %s" % (val) + print(msg) + + return msg + + def turn_off(self): + """Turn the PMX off.""" + self.clean_serial() + self.ser.write("OUTP OFF\n\r") + self.wait() + self.ser.write("OUTP?\n\r") + self.wait() + val = self.ser.readline() + msg = "Output state = %s" % (val) + print(msg) + + return msg + + # ***** Helper Methods ***** + def __conn(self, rtu_port=None, tcp_ip=None, tcp_port=None, timeout=None): + """Connect to the PMX module. + + Args: + rtu_port (str): Serial RTU port + tcp_ip (str): TCP IP address + tcp_port (int): TCP port + timeout (int): Connection timeout + """ + if rtu_port is None and (tcp_ip is None or tcp_port is None): + raise Exception( + "Aborted PMX._conn() due to no RTU or " + "TCP port specified") + elif (rtu_port is not None and + (tcp_ip is not None or tcp_port is not None)): + raise Exception( + "Aborted PMX._conn() due to RTU and TCP port both being " + "specified. 
Can only have one or the other.") + elif rtu_port is not None: + self.ser = serial.Serial( + port=rtu_port, baudrate=19200, bytesize=8, + parity='N', stopbits=1, timeout=timeout) + self._rtu_port = rtu_port + self.using_tcp = False + msg = "Connected to RTU port %s" % (rtu_port) + elif tcp_ip is not None and tcp_port is not None: + self.ser = mx.Serial_TCPServer((tcp_ip, tcp_port), timeout) + self._tcp_ip = tcp_ip + self._tcp_port = int(tcp_port) + self.using_tcp = True + msg = "Connected to TCP IP %s at port %d" % (tcp_ip, tcp_port) + else: + raise Exception( + "Aborted PMX._conn() due to unknown error") + return msg + + def wait(self): + """Sleep.""" + time.sleep(0.05) + return True + + def clean_serial(self): + """Flush the serial buffer.""" + # if not False: + # self.ser.reset_input_buffer() + # self.ser.reset_output_buffer() + # self.ser.flush() + # else: + # self.ser.flushInput() + self.ser.flushInput() + return True + + def _remote_Mode(self): + """Enable remote control.""" + self.clean_serial() + self.ser.write('SYST:REM\n\r') + self.wait() + return True + + +class Command: + """The Command object is used to command the PMX. 
+ + Args: + PMX (PMX): PMX object + + """ + + def __init__(self, input_PMX): + # PMX connection + if input_PMX is not None: + self._PMX = input_PMX + else: + raise Exception( + "PMX object not passed to Command constructor\n") + + # Dict of commands + self._cmds = { + "set_port": "P", + "check_v": "V?", + "check_c": "C?", + "check_vc": "VC?", + "check_out": "O?", + "set_v": "V", + "set_c": "C", + "set_v_lim": "VL", + "set_c_lim": "CL", + "set_on": "ON", + "set_off": "OFF", + "get_help": "H", + "use_ext": "U", + "ign_ext": "I", + "stop": "Q"} + + def get_help(self): + """Print possible commands.""" + wrstr = ( + "\nChange ttyUSB port = '%s'\n" + "Check output voltage = '%s'\n" + "Check output current = '%s'\n" + "Check output voltage and current = '%s'\n" + "Check output state = '%s'\n" + "Set output voltage = '%s' [setting]\n" + "Set output current = '%s' [setting]\n" + "Set output voltage limit = '%s'\n" + "Set output current limit = '%s'\n" + "Turn output on = '%s'\n" + "Turn output off = '%s'\n" + "Print possible commands = '%s'\n" + "Use external voltage = '%s'\n" + "Ignore external voltage = '%s'\n" + "Quit program = '%s'\n" + % (self._cmds["set_port"], + self._cmds["check_v"], + self._cmds["check_c"], + self._cmds["check_vc"], + self._cmds["check_out"], + self._cmds["set_v"], + self._cmds["set_c"], + self._cmds["set_v_lim"], + self._cmds["set_c_lim"], + self._cmds["set_on"], + self._cmds["set_off"], + self._cmds["get_help"], + self._cmds["use_ext"], + self._cmds["ign_ext"], + self._cmds["stop"])) + return wrstr + + def user_input(self, arg): + """Take user input and execute PMX command.""" + argv = arg.split() + # if len(args) > 0: + #value = float(args[0]) + + while len(argv): + cmd = str(argv.pop(0)).upper() + # No command + if cmd == '': + return + # Check voltage + elif cmd == self._cmds["check_v"]: + return self._PMX.check_voltage() + # Check current + elif cmd == self._cmds["check_c"]: + return self._PMX.check_current() + # Check voltage and current + 
elif cmd == self._cmds["check_vc"]: + return self._PMX.check_voltage_current() + # Check output state + elif cmd == self._cmds["check_out"]: + return self._PMX.check_output() + # Turn output state ON + elif cmd == self._cmds["set_on"]: + return self._PMX.turn_on() + # Turn output state OFF + elif cmd == self._cmds["set_off"]: + return self._PMX.turn_off() + # Get HELP + + # elif cmd == self._cmds["set_v"]: + # print(value) + #ret = self._PMX.set_voltage(value) + + # elif cmd == self._cmds["set_c"]: + #ret = self._PMX.set_current(value) + + elif cmd == self._cmds["use_ext"]: + return self._PMX.use_external_voltage() + elif cmd == self._cmds["ign_ext"]: + return self._PMX.ign_external_voltage() + elif cmd == self._cmds["get_help"]: + ret = self.get_help() + print(ret) + # Exit the program + elif cmd == self._cmds["stop"]: + sys.exit("\nExiting...") + # Set the RTU port + elif cmd == self._cmds["set_port"]: + if self._PMX.using_tcp: + print("Connected via TCP rather than RTU. Cannot set RTU port") + return False + set_val = self._int(argv.pop(0)) + if set_val is not None: + del self._PMX + self._PMX = PMX(set_val) + else: + return False + elif cmd == self._cmds["set_v"]: + set_val = self._float(argv.pop(0)) + if set_val is not None: + self._PMX.set_voltage(set_val) + else: + return False + elif cmd.lower() == self._cmds["set_c"].lower(): + set_val = self._float(argv.pop(0)) + if set_val is not None: + self._PMX.set_current(set_val) + else: + return False + elif cmd == self._cmds["set_v_lim"]: + set_val = self._float(argv.pop(0)) + if set_val is not None: + self._PMX.set_voltage_limit(set_val) + else: + return False + elif cmd.lower() == self._cmds["set_c_lim"].lower(): + set_val = self._float(argv.pop(0)) + if set_val is not None: + self._PMX.set_current_limit(set_val) + else: + return False + else: + print("Command '%s' not understood..." 
% (cmd)) + return False + return True + + # ***** Helper Methods ***** + def _float(self, val): + """Try to convert a value to a float.""" + try: + return float(val) + except ValueError: + print("Input '%s' not understood, must be a float..." % (val)) + return None + + def _int(self, val): + """Try to convert a value to an int.""" + try: + return int(val) + except ValueError: + print("Input '%s' not understood, must be a int..." % (val)) + return None diff --git a/socs/agent/smurf_recorder.py b/socs/agent/smurf_recorder.py deleted file mode 100644 index df2cda132..000000000 --- a/socs/agent/smurf_recorder.py +++ /dev/null @@ -1,500 +0,0 @@ -import os -from enum import Enum - -import time -import txaio -import numpy as np -import socket - -# For logging -txaio.use_twisted() - -on_rtd = os.environ.get('READTHEDOCS') == 'True' -if not on_rtd: - import so3g - from spt3g import core - - -class FlowControl(Enum): - """Flow control enumeration.""" - ALIVE = 0 - START = 1 - END = 2 - CLEANSE = 3 - - -def _create_dirname(start_time, data_dir, stream_id): - """Create the file path for .g3 file output. - - Note: This will create directories if they don't exist already. - - Parameters - ---------- - start_time : float - Timestamp for start of data collection - data_dir : str - Top level data directory for output - stream_id : str - Stream id of collection - - Returns - ------- - str - dirname for file for output - - """ - sub_dir = os.path.join(data_dir, - "{:.5}".format(str(start_time)), - stream_id) - - # Create new dir for current day - if not os.path.exists(sub_dir): - os.makedirs(sub_dir) - - return sub_dir - - -def _create_file_path(dirname, basename, suffix="", extension="g3"): - """Create the full path to the output file for writing. 
- - Parameters - ---------- - dirname : str - dirname for output, likely from _create_dirname - basename : str - basename of file without extension or suffix - suffix : str - Suffix to append to filename (include your own '_') - extension : str - file extension to put on file (without '.') - - Returns - ------- - str - Full filepath of file for output - - """ - filename = "{}{}.{}".format(basename, suffix, extension) - filepath = os.path.join(dirname, filename) - - return filepath - - -class FrameRecorder: - """Frame recording object. Keeps track of the reader/writer and associated - information, such as the frames being written to file and the location of - the output files. - - All of the methods were separated from the loop that ran in the - TimestreamAggregator OCS Agent. They're meant to be looped over still and - for that we provide the run() method. - - This class is not safe to run in the twisted reactor, run in a worker - thread. - - Parameters - ---------- - file_duration : int - seconds per file - tcp_addr : str - tcp connection address (i.e. "tcp://127.0.0.1:4536") - data_dir : str - Location to write data files to. Subdirectories will be created within - this directory roughly corresponding to one day. - stream_id : str - Stream ID to use to determine file path if one is not found in frames. - - Attributes - ---------- - time_per_file : int - Amount of time in seconds to have in each file. Files will be rotated - after this many seconds. - address : str - Address of the host sending the data via the G3NetworkSender. - data_dir : str - Location to write data files to. Subdirectories will be created within - this directory roughly corresponding to one day. - log : txaio.tx.Logger - txaio logger object. - reader : spt3g.core.G3Reader - G3Reader object to read the frames from the G3NetworkSender. - writer : spt3g.core.G3Writer - G3Writer for writing the frames to disk. 
- data_received : bool - Whether data has been received by the current instance of the G3Reader. - frames : list - List of frames that have been read from the network. Gets cleared after - writing to file. - last_meta - The last meta data Frame passed to the G3Writer. The last meta data - frame gets written to the top of each file. - last_frame_write_time : float - The time at which we wrote the last frame. Used to track when files - should be rotated between acquisitions. - last_connection_time : float - The time at which we last attempted to establish a network connection. - Used to rate limit the number of attempted connections to the - G3NetworkSender. - _last_file_timestamp : int - Timestamp of last filename. Used to check if a new file was started - within the same second a new file is being made to avoid naming - conflicts. - filename_suffix : int - For enumerating files within an acquisition. We want to split files in - 10 minute intervals, but the names should remain the same with a - "_0001" style suffix, which we track with this attribute. - start_time : float - Start time of current file. Only agrees with basename time if - filename_suffix == 0. - dirname : str - The directory path currently in use. - basename : str - The basename of the file currently being used. The actual file will - also contain the tracked suffix. - monitored_channels : list - List of readout channels to be monitored. Data from these channels will - be stored in ``stream_data`` after each ``run`` function call. - target_rate : float - Target sampling rate for monitored channels in Hz. - stream_data : dict - Data containing downsampled timestream data from the monitored. This - dict will have the channel name (such as ``r0012``) as the key, and - the values will have the structure required by the OCS_Feed. 
- """ - def __init__(self, file_duration, tcp_addr, data_dir, stream_id, - target_rate=10): - # Parameters - self.time_per_file = file_duration - self.address = tcp_addr - self.data_dir = data_dir - self.stream_id = stream_id - self.log = txaio.make_logger() - - # Reader/Writer - self.reader = None - self.writer = None - self.data_received = False - - # Attributes - self.frames = [] - self.last_meta = None - self.last_frame_write_time = None - self.last_connection_time = None - self._last_file_timestamp = None - - self.filename_suffix = 0 - self.start_time = None - self.dirname = None - self.basename = None - - self.monitored_channels = [] - self.target_rate = target_rate - self.stream_data = {} - - def __del__(self): - """Clean up by closing out the file once writing is complete.""" - if self.writer is not None: - self.close_file() - - def _establish_reader_connection(self, timeout=5): - """Establish the connection to the G3NetworkSender. - - Attempts to connect once and waits if connection could not be made and - last connection attempt was very recent. - - Parameters - ---------- - timeout : int - Timeout in seconds for the G3Reader, afterwhich the connection will - drop and G3Reader will return empty lists on each Process call. - - Returns - ------- - reader : spt3g.core.G3Reader - The G3Reader object connected to the configured address and port - - """ - reader = None - - try: - reader = core.G3Reader(self.address, - timeout=timeout) - self.log.debug("G3Reader connection to {addr} established!", - addr=self.address) - except RuntimeError: - self.log.error("G3Reader could not connect.") - - # Prevent rapid connection attempts - if self.last_connection_time is not None: - t_diff = time.time() - self.last_connection_time - if t_diff < 1: - self.log.debug("Last connection was only {d} seconds ago. 
" + - "Sleeping for {t}.", d=t_diff, t=(1 - t_diff)) - time.sleep(1 - t_diff) - - self.last_connection_time = time.time() - - return reader - - def read_frames(self, timeout=5): - """Establish a connection to the G3NetworkSender if one does not exist, - then try to read frames from it, discarding any flow control frames. - - Clear internally buffered data and cleanup with call to - self.close_file() if the reader timesout or has otherwise lost its - connection. - - Parameters - ---------- - timeout : int - Timeout in seconds for establishing the G3Reader connection. - - """ - # Try to connect if we are not already - if self.reader is None: - self.reader = self._establish_reader_connection(timeout) - - if self.reader is None: - return - - # Allows tests to Mock a reader - if type(self.reader) is core.G3Reader: - self.frames = self.reader.Process(None) - - if self.frames: - # Handle flow control frames - for f in self.frames: - flow = f.get('sostream_flowcontrol') - - if flow is not None: - # START; create_new_file - if FlowControl(flow) is FlowControl.START: - self.close_file() - - # END; close_file - if FlowControl(flow) == FlowControl.END: - self.close_file() - self.last_meta = None - - # ALIVE and CLEANSE; do nothing - if FlowControl(flow) is FlowControl.CLEANSE: - self.log.debug("I saw a CLEANSE frame.") - - # Discard all flow control frames - self.frames = [x for x in self.frames if 'sostream_flowcontrol' not in x] - # Discard Pipeline info frame - self.frames = [x for x in self.frames - if x.type != core.G3FrameType.PipelineInfo] - if self.frames and not self.data_received: - self.data_received = True - self.log.info("Started receiving frames from {addr}", - addr=self.address) - return - else: - if self.data_received: - self.log.info("Could not read frames. Connection " + - "timed out, or G3NetworkSender offline. 
" + - "Cleaning up...") - self.close_file() - self.data_received = False - self.reader = None - - def check_for_frame_gap(self, gap_size=5): - """Check for incoming frame time gap. If frames stop coming in likely - an acquisition has stopped and we need to rotate files and increment - our file suffix. - - Rotate files to match data acquisitions (assuming 5 seconds apart or - more until flow control improved on streaming end). - - Parameters - ---------- - gap_size : int - gap size threshold in seconds between frames to check for. Close - file if exceeded. - - """ - # Don't even try to rotate files if writer not active. - if self.writer is None: - self.log.debug("Writer not active, not checking frame gap.") - return - - if self.last_frame_write_time is not None: - t_diff = time.time() - self.last_frame_write_time - if t_diff > gap_size: - self.log.debug("Last frame written more than {g} seconds " + - "ago, rotating file", g=t_diff) - self.close_file() - - def create_new_file(self): - """Create a new file if needed. - - This is only done if an existing file is closed out (i.e. writer is - None). Filename and path will be based on the time the file is made. - Acquisitions that are grouped together by file duration will share the - same basename as the start of the acquisition and have their suffix - incremented. - - Writes the last meta data frame to the start of the file if there is - one. 
- - Example: - # Three 10 minute observations - file_1 = 2019-08-07-01-30-00_000.g3 - file_2 = 2019-08-07-01-30-00_001.g3 - file_3 = 2019-08-07-01-30-00_002.g3 - - # Acqusition stopped and new one started on the hour - new_file = 2019-08-07-02-00-00_000.g3 - - """ - if self.writer is None: - # Used for tracking when to split acquisitions - self.start_time = time.time() - - # Avoid duplicate filenames if new file started within 1 sec - if self._last_file_timestamp is None: - pass - elif int(self.start_time) == self._last_file_timestamp: - self.log.debug("New file started within 1 second of previous " + - "file, incrementing filename suffix.") - self.filename_suffix += 1 - - # Only create new dir and basename if we've finished an acquisition - if self.filename_suffix == 0: - stream_id = self.stream_id - for f in self.frames: - if f.get('sostream_id') is not None: - stream_id = f['sostream_id'] - break - - self.dirname = _create_dirname(self.start_time, - self.data_dir, - stream_id) - self.basename = int(self.start_time) - - suffix = "_{:03d}".format(self.filename_suffix) - filepath = _create_file_path(self.dirname, self.basename, suffix) - self.log.info("Writing to file {}".format(filepath)) - self.writer = core.G3Writer(filename=filepath) - self._last_file_timestamp = int(self.start_time) - - # Write the last metadata frame to the start of the new file - if self.last_meta is not None: - self.writer(self.last_meta) - - return filepath - - def write_frames_to_file(self): - """Write all frames to file. - - Note: Assumes file writer is already instantiated. - - """ - for f in self.frames: - # Make sure we do not record flowcontrol frames - flow = f.get('sostream_flowcontrol') - if flow is not None: - self.log.warn("Received flow control frame with value {v}. 
" + - "Flow control frames should be discarded " + - "earlier than this", v=flow) - continue - - # Write most recent meta data frame - if f.type == core.G3FrameType.Observation: - self.last_meta = f - - self.writer(f) - self.writer.Flush() - self.last_frame_write_time = time.time() - - # clear frames list - self.frames = [] - - def split_acquisition(self): - """Split the acquisition into multiple files based on file_duration. - Max file length set by file_duration parameter on FrameRecorder object. - - Only rotates if writer currently active. - - """ - if self.writer is None: - return - - t_diff = time.time() - self.start_time - if t_diff > self.time_per_file: - self.log.debug("{s} seconds elapsed since start of file, " + - "splitting acquisition", s=t_diff) - # Flush internal cache and clean-up (no frame written) - self.writer(core.G3Frame(core.G3FrameType.EndProcessing)) - self.writer = None - self.filename_suffix += 1 - - def close_file(self): - """Close a file. Clean up with EndProcessing frame, set self.writer to - None, rezero suffix. - - """ - if self.writer is not None: - # Flush internal cache and clean-up (no frame written) - self.writer(core.G3Frame(core.G3FrameType.EndProcessing)) - self.log.debug("File closed.") - self.writer = None - self.filename_suffix = 0 - - def run(self): - """Run the DataRecorder, performing all frame collection, file setup, - file rotation, and writing to file. - - Will only perform actions if frames can be read, with the exception of - closing the open file if a gap is encountered (i.e. the current - acquisition has stopped.) - - Meant to be run in a loop. 
- - """ - self.read_frames() - self.check_for_frame_gap(10) - if len(self.monitored_channels) > 0: - try: - self.read_stream_data() - except Exception as e: - self.log.warn("Exception thrown when reading stream data:\n{e}", e=e) - - if self.frames: - self.create_new_file() - self.write_frames_to_file() - self.split_acquisition() - - def read_stream_data(self): - """Reads stream data from ``self.frames``, downsamples it, and stores - it in ``self.stream_data``. - """ - chan_keys = [f'r{c:04}' for c in self.monitored_channels] - self.stream_data = { - k: {'timestamps': [], 'block_name': k, 'data': {k:[]}} - for k in chan_keys - } - - for frame in self.frames: - if frame.type != core.G3FrameType.Scan: - continue - ds_factor = (frame['data'].sample_rate/core.G3Units.Hz) \ - // self.target_rate - if np.isnan(ds_factor): - continue - ds_factor = max(int(ds_factor), 1) - n_samples = frame['data'].n_samples - if 1 < n_samples <= ds_factor: - ds_factor = n_samples - 1 - times = [ - t.time / core.G3Units.s - for t in frame['data'].times()[::ds_factor] - ] - for key in chan_keys: - data = frame['data'].get(key) - if data is None: - continue - self.stream_data[key]['timestamps'].extend(times) - self.stream_data[key]['data'][key].extend(list(data[::ds_factor])) - diff --git a/socs/agent/vantage_pro2/__init__.py b/socs/agent/vantage_pro2/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/socs/agent/vantage_pro2/vantage_pro2.py b/socs/agent/vantage_pro2/vantage_pro2.py index fa3ca7bc7..798af9d04 100644 --- a/socs/agent/vantage_pro2/vantage_pro2.py +++ b/socs/agent/vantage_pro2/vantage_pro2.py @@ -43,9 +43,7 @@ def calc_crc(data): - """ - Calculates CRC - """ + """Calculates CRC.""" crc = 0 for i in data: @@ -55,16 +53,13 @@ def calc_crc(data): class VantagePro2: - """ - Allows communication to Vantage Pro 2 Weather Monitor Module. - Contains commands to be issued and member variables that store - collected data. 
+ """Allows communication to Vantage Pro 2 Weather Monitor Module. + Contains commands to be issued and member variables that store + collected data. """ def __init__(self, path, baud=19200, timeout=1): - """ - Establish serial connection and initialize member variables - """ + """Establish serial connection and initialize member variables.""" if not path: path = '/dev/ttyUSB0' @@ -72,9 +67,7 @@ def __init__(self, path, baud=19200, timeout=1): self.startup() def startup(self): - """ - Wakeup vantage pro 2 console - """ + """Wakeup vantage pro 2 console.""" self.com.write(b"\n") for i in range(0, 3): response = self.com.read(2) @@ -85,9 +78,9 @@ def startup(self): raise TimeoutError("Vantage Pro 2 console not woke") def crc_check(self, data): - """ - Checks received data. If received data has a CRC value of 0, - then correct message received! + """Checks received data. If received data has a CRC value of 0, then + correct message received! + """ crc = calc_crc(data) @@ -103,9 +96,9 @@ def __exit__(self): self.com.close() def conditions_screen(self): - """ - Move console to conditions screen - (where loop command can succesfully be sent) + """Move console to conditions screen (where loop command can + succesfully be sent.) + """ self.startup() @@ -121,10 +114,9 @@ def conditions_screen(self): raise TimeoutError("Command not acknowledged") def msg(self, msg): - """ - Send general command or query to module. - If command received and acknowledged, continue - else retry 2 more times. + """Send general command or query to module. If command received and + acknowledged, continue else retry 2 more times. 
+ """ attempt = 0 @@ -142,9 +134,7 @@ def msg(self, msg): return False def interrupt_daq(self): - """ - Interrupts loop command...if sent before loop command finishes - """ + """Interrupts loop command...if sent before loop command finishes.""" self.startup() self.com.write(b'\n') @@ -158,71 +148,78 @@ def interrupt_daq(self): raise TimeoutError('Data Acquisition not interrupted') def receive_data(self): - """ - Reads weather data from console and returns loop_data{} - User should read Vantage Pro Serial Communication Reference Manual - for the format of the loop data packet. - Specifically for units of data! - - -Barometer trend: the current 3 hour barometer trend. - -60 = Falling rapidly - -20 = Falling slowly - 0 = Steady - 20 = Rising slowly - 60 = Rising rapidly - 80 = No trend available - Any other value = The VantagePro2 does not have the 3 hours - of data needed to determine the barometer trend. - -Barometer: Current barometer reading (Hg/1000) - -Inside Temperature: Temperatue in Fahrenheit (10th of a degree) - -Inside Humidity: Relative humidity in percent - -Outside Temperature: Temperature in Fahrenheit (10th of a degree) - -Wind Speed: Wind speed in miles per hour - -10 min average wind speed: 10 minute average wind speed in mph - -Wind Direction: From 1-360 degrees - 0 = No wind direction data - 90 = East - 180 = South - 270 = West - 360 = North - Extra Temperatures: Temperature from up to 7 extra temperature - stations. - Soil Temperatures: Four soil temperature sensors, in the same - format as the extra temperatures format listed above. - Leaf Temperatures: Four leaf temperature sensors, in the same - format as the extra temperatures format listed above. 
- Outside Humidity: Relativie humidity in percent - Extra Humidities: Realtive humidity in percent for 7 humidity - stations - Rain Rate: Number of rain clicks (0.1in or 0.2mm) per hour - UV: "Unit is in UV index" - Solar Radiation: Units in watt/meter^2 - Storm Rain: Stored in 100th of an inch - Start Date of Current storm: Gives month, date, - and year (offset by 2000) - Day Rain: Number of rain clicks (0.1in or 0.2mm)/hour - in the past day - Month Rain: Number of rain clicks (0.1in or 0.2mm)/hour - in the past month - Year Rain: Number of rain clicks (0.1in or 0.2mm)/hour - in the past year - Day ET: 1000th of an inch - Month ET: 1000th of an inch - Year ET: 1000th of an inch - Soil Moistures: In centibar, supports 4 soil sensors - Leaf Wetnesses: Scale from 0-15. Supports 4 leaf sensors - 0 = Very dry - 15 = Very wet - Inside Alarms: Currently active inside alarms - Rain Alarms: Currently active rain alarms - Outside Alarms: Currently active outside alarms - Outside Humidity Alarms: Currently active humidity alarms - Extra Temp/Hum Alarms: Currently active extra - temperature/humidity alarms - Soil & Leaf Alarms: Currently active soil/leaf alarms - Console Battery Voltage: Voltage - Time of Sunrise: Time is stored as hour x 100 + min - Time of Sunset: Time is stored as hour x 100 + min + """Reads weather data from console and returns loop_data{}. + User should read Vantage Pro Serial Communication Reference Manual + for the format of the loop data packet. + Specifically for units of data! Available fields: + + * Barometer trend: the current 3 hour barometer trend. Possible values:: + + -60 = Falling rapidly + -20 = Falling slowly + 0 = Steady + 20 = Rising slowly + 60 = Rising rapidly + 80 = No trend available + + Any other value = The VantagePro2 does not have the 3 hours + of data needed to determine the barometer trend. 
+ + * Barometer: Current barometer reading (Hg/1000) + * Inside Temperature: Temperatue in Fahrenheit (10th of a degree) + * Inside Humidity: Relative humidity in percent + * Outside Temperature: Temperature in Fahrenheit (10th of a degree) + * Wind Speed: Wind speed in miles per hour + * 10 min average wind speed: 10 minute average wind speed in mph + * Wind Direction: From 1-360 degrees. Possible values:: + + 0 = No wind direction data + 90 = East + 180 = South + 270 = West + 360 = North + + * Extra Temperatures: Temperature from up to 7 extra temperature + stations. + * Soil Temperatures: Four soil temperature sensors, in the same + format as the extra temperatures format listed above. + * Leaf Temperatures: Four leaf temperature sensors, in the same + format as the extra temperatures format listed above. + * Outside Humidity: Relativie humidity in percent + * Extra Humidities: Realtive humidity in percent for 7 humidity + stations + * Rain Rate: Number of rain clicks (0.1in or 0.2mm) per hour + * UV: "Unit is in UV index" + * Solar Radiation: Units in watt/meter^2 + * Storm Rain: Stored in 100th of an inch + * Start Date of Current storm: Gives month, date, + and year (offset by 2000) + * Day Rain: Number of rain clicks (0.1in or 0.2mm)/hour + in the past day + * Month Rain: Number of rain clicks (0.1in or 0.2mm)/hour + in the past month + * Year Rain: Number of rain clicks (0.1in or 0.2mm)/hour + in the past year + * Day ET: 1000th of an inch + * Month ET: 1000th of an inch + * Year ET: 1000th of an inch + * Soil Moistures: In centibar, supports 4 soil sensors + * Leaf Wetnesses: Scale from 0-15. Supports 4 leaf sensors. 
Possible + values:: + + 0 = Very dry + 15 = Very wet + + * Inside Alarms: Currently active inside alarms + * Rain Alarms: Currently active rain alarms + * Outside Alarms: Currently active outside alarms + * Outside Humidity Alarms: Currently active humidity alarms + * Extra Temp/Hum Alarms: Currently active extra + temperature/humidity alarms + * Soil & Leaf Alarms: Currently active soil/leaf alarms + * Console Battery Voltage: Voltage + * Time of Sunrise: Time is stored as hour x 100 + min + * Time of Sunset: Time is stored as hour x 100 + min """ @@ -322,9 +319,9 @@ def receive_data(self): return loop_data def weather_daq(self): - """ - Issues "LOOP 1"(which samples weather data) command - to weather station, and unpacks the data. + """Issues "LOOP 1"(which samples weather data) command to weather + station, and unpacks the data. + """ # Startup and issue loop command @@ -337,10 +334,9 @@ def weather_daq(self): return self.receive_data() def print_data(self, data): - """ - Prints contents of data collected from LOOP command. - Loop: loop# - Field : Value format. + """Prints contents of data collected from LOOP command. + Loop: loop# + Field : Value format. 
""" print("**************") diff --git a/socs/db/pysmurf_files_manager.py b/socs/db/pysmurf_files_manager.py deleted file mode 100644 index 95712218f..000000000 --- a/socs/db/pysmurf_files_manager.py +++ /dev/null @@ -1,165 +0,0 @@ -""" -Script to create, update, or drop the `pysmurf_files` database -""" -import argparse -import mysql.connector -import getpass - - -class Column: - def __init__(self, name, type, opts="", fmt="%s"): - self.name = name - self.type = type - self.opts = opts - self.fmt = fmt - - def __str__(self): - return " ".join([self.name, self.type, self.opts]) - -TABLE_VERSION = 1 -table = f'pysmurf_files_v{TABLE_VERSION}' -columns = [ - Column("id", "INT", opts="NOT NULL AUTO_INCREMENT PRIMARY KEY"), - Column("path", "VARCHAR(260)", opts="NOT NULL"), - Column("action", "VARCHAR(32)"), - Column("timestamp", "TIMESTAMP"), - Column("action_timestamp", "INT"), - Column("format", "VARCHAR(32)"), - Column("plot", "TINYINT(1)"), - Column("site", "VARCHAR(32)"), - Column("pub_id", "VARCHAR(32)"), - Column("script_id", "VARCHAR(32)"), - Column("instance_id", "VARCHAR(32)"), - Column("copied", "TINYINT(1)"), - Column("failed_copy_attempts", "INT"), - Column("md5sum", "BINARY(16)", opts="NOT NULL", fmt="UNHEX(%s)"), - Column("pysmurf_version", "VARCHAR(64)"), - Column("socs_version", "VARCHAR(64)"), -] -col_dict = {c.name: c for c in columns} - - -def add_entry(cur, entry): - """ - Adds entry to table. - """ - if not set(col_dict.keys()).issuperset(entry.keys()): - raise RuntimeError( - "Invalid file entry provided... 
\n" - "Keys given: {}\n" - "Keys allowed: {}".format(entry.keys(), col_dict.keys()) - ) - - keys, vals, fmts = [], [], [] - for k in list(entry.keys()): - if entry[k] is None: - continue - keys.append(k) - vals.append(entry[k]) - fmts.append(col_dict[k].fmt) - - query = "INSERT INTO {} ({}) VALUES ({})"\ - .format(table, ", ".join(keys), ", ".join(fmts)) - - cur.execute(query, tuple(vals)) - - -def create_table(cur, update=True): - """ - Creates new pysmurf_files table from scratch. - - Args: - cur (MySQL Cursor): - cursor to files db - update (optional, bool): - Add additional columns if existing table is not up to date. - """ - - cur.execute("SHOW TABLES;") - table_names = [x[0] for x in cur.fetchall()] - if f'{table}' not in table_names: - print(f"Creating {table} table...") - col_strings = [str(c) for c in columns] - query = "CREATE TABLE {} ({});".format(table, ", ".join(col_strings)) - - try: - cur.execute(query) - print(f"Created table {table}") - except mysql.connector.errors.ProgrammingError as e: - print(e) - else: - print(f"Found table {table} and not updating") - - -def drop_table(cur): - """ - Drops pysmurf_files. 
- """ - try: - cur.execute(f"DROP TABLE {table};") - con.commit() - print(f"{table} dropped.") - except mysql.connector.errors.ProgrammingError as e: - print(e) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('cmd', choices=['create', 'update', 'drop', 'test']) - parser.add_argument('--password', '-p', type=str, - help="Password for development database") - args = parser.parse_args() - - sql_config = { - 'user': 'development', - 'database': 'files', - 'passwd': args.password - } - - if sql_config['passwd'] is None: - sql_config['passwd'] = getpass.getpass("Password for development db: ") - - con = mysql.connector.connect(**sql_config) - cur = con.cursor() - try: - if args.cmd == 'test': - import datetime - entry = { - 'path':'/data/pysmurf_test/1568779322_fake_tuning4.txt', - 'type': 'fake_tuning', - 'timestamp': datetime.datetime(2019, 9, 18, 4, 2, 2, 188764), - 'plot': 1, - 'format': 'txt', - 'md5sum': '7ac66c0f148de9519b8bd264312c4d64', - 'site': 'observatory', - 'instance_id': 'pysmurf-monitor', - 'copied': 0, - 'failed_copy_attempts': 0, - 'socs_version': '0+untagged.140.g5307c6b.dirty' - } - - add_entry(cur, entry) - con.commit() - if args.cmd == 'create': - create_table(cur) - con.commit() - elif args.cmd == 'update': - update_columns(cur) - con.commit() - elif args.cmd == 'drop': - while True: - resp = input("Are you sure you want to drop pysmurf_files? 
[y/n]: ") - if resp.lower().strip() == 'y': - drop_table(cur) - con.commit() - break - elif resp.lower().strip() == 'n': - break - else: - print("Could not recognize input") - finally: - print("Closing connection") - con.close() - - - diff --git a/socs/snmp.py b/socs/snmp.py index 15e8d606e..7ead85e73 100644 --- a/socs/snmp.py +++ b/socs/snmp.py @@ -1,7 +1,7 @@ import txaio from pysnmp.hlapi.twisted import getCmd, SnmpEngine, CommunityData, UdpTransportTarget,\ - ContextData, ObjectType, ObjectIdentity + ContextData, ObjectType, ObjectIdentity, UsmUserData # For logging txaio.use_twisted() @@ -79,7 +79,7 @@ def _failure(self, error_indication): """ self.log.error('%s failure: %s' % (self.address, error_indication)) - def get(self, oid_list): + def get(self, oid_list, version): """Issue a getCmd to get SNMP OID states. Example @@ -111,6 +111,14 @@ def get(self, oid_list): .. _Specifying MIB Objects: http://snmplabs.com/pysnmp/docs/pysnmp-hlapi-tutorial.html#specifying-mib-object + version : int + SNMP version for communicaton (1, 2, or 3). All versions supported + here without auth or privacy. If using v3 the configured username + on the SNMP device should be 'ocs'. For details on version + implementation in pysnmp see `SNMP Versions`_. + + .. 
_SNMP Versions: + https://pysnmp.readthedocs.io/en/latest/examples/hlapi/v3arch/asyncore/sync/manager/cmdgen/snmp-versions.html Returns ------ @@ -122,8 +130,17 @@ def get(self, oid_list): """ oid_list = [ObjectType(ObjectIdentity(*x)) if isinstance(x, tuple) else x for x in oid_list] + if version == 1: + version_object = CommunityData('public', mpModel=0) # SNMPv1 + elif version == 2: + version_object = CommunityData('public') # SNMPv2c + elif version == 3: + version_object = UsmUserData('ocs') # SNMPv3 (no auth, no privacy) + else: + raise ValueError(f'SNMP version {version} not supported.') + datagram = getCmd(self.snmp_engine, - CommunityData('public', mpModel=0), # SNMPv1 + version_object, self.udp_transport, ContextData(), *oid_list) diff --git a/socs/testing/device_emulator.py b/socs/testing/device_emulator.py index d0c35f653..c08a0c3ab 100644 --- a/socs/testing/device_emulator.py +++ b/socs/testing/device_emulator.py @@ -56,7 +56,7 @@ class DeviceEmulator: Attributes: responses (dict): Current set of responses the DeviceEmulator would - give + give. Should all be strings, not bytes-like. default_response (str): Default response to send if a command is unrecognized. No response is sent and an error message is logged if a command is unrecognized and the default response is set to None. @@ -64,6 +64,7 @@ class DeviceEmulator: _type (str): Relay type, either 'serial' or 'tcp'. _read (bool): Used to stop the background reading of data recieved on the relay. + _conn (socket.socket): TCP connection for use in 'tcp' relay. """ def __init__(self, responses): @@ -71,6 +72,7 @@ def __init__(self, responses): self.default_response = None self._type = None self._read = True + self._conn = None @staticmethod def _setup_socat(): @@ -175,14 +177,19 @@ def shutdown(self): itself. 
""" - #print('shutting down background reading') + # print('shutting down background reading') self._read = False time.sleep(1) if self._type == 'serial': - #print('shutting down socat relay') + # print('shutting down socat relay') self.proc.terminate() out, err = self.proc.communicate() - #print(out, err) + # print(out, err) + if self._type == 'tcp': + # print('shutting down background tcp relay') + if self._conn: + self._conn.close() + self._sock.close() def _read_socket(self, port): """Loop until shutdown, reading any commands sent over the relay. @@ -195,30 +202,34 @@ def _read_socket(self, port): self._read = True # Listen for connections - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('127.0.0.1', port)) - sock.listen(1) + self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._sock.bind(('127.0.0.1', port)) + self._sock.listen(1) print("Device emulator waiting for tcp client connection") - conn, client_address = sock.accept() + self._conn, client_address = self._sock.accept() print(f"Client connection made from {client_address}") while self._read: - msg = conn.recv(4096).strip().decode('utf-8') + msg = self._conn.recv(4096).strip().decode('utf-8') if msg: print(f"msg='{msg}'") response = self._get_response(msg) + # Avoid user providing bytes-like response + if isinstance(response, bytes): + response = response.decode() + if response is None: continue print(f"response='{response}'") - conn.sendall((response).encode('utf-8')) + self._conn.sendall((response).encode('utf-8')) time.sleep(0.01) - conn.close() - sock.close() + self._conn.close() + self._sock.close() def create_tcp_relay(self, port): """Create the TCP relay, emulating a hardware device connected over @@ -259,6 +270,10 @@ def define_responses(self, responses, default_response=None): >>> responses = {'*IDN?': 'LSCI,MODEL425,4250022,1.0', 'RDGFIELD?': ['+1.0E-01', '+1.2E-01', '+1.4E-01']} + Notes: + The responses defined should all be strings, not bytes-like. 
The + DeviceEmulator will handle encoding/decoding. + """ print(f"responses set to {responses}") self.responses = responses diff --git a/socs/util.py b/socs/util.py index 9b898b9bf..43922390f 100644 --- a/socs/util.py +++ b/socs/util.py @@ -10,19 +10,3 @@ def get_md5sum(filename): for line in open(filename, 'rb'): m.update(line) return m.hexdigest() - - -@contextmanager -def get_db_connection(**config): - """ - Mysql connection context manager. - - Same args as mysql.connector: - https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html - """ - import mysql.connector - con = mysql.connector.connect(**config) - try: - yield con - finally: - con.close() diff --git a/tests/agent/test_moxaSerial.py b/tests/agent/test_moxaSerial.py new file mode 100644 index 000000000..95a8266fb --- /dev/null +++ b/tests/agent/test_moxaSerial.py @@ -0,0 +1,47 @@ +import time +import pytest + +from socs.agent import moxaSerial +from socs.testing.device_emulator import create_device_emulator + + +tcp_emulator = create_device_emulator({'ping': 'pong\r'}, + 'tcp', 9001) + + +# Tried this as a fixture, but connections weren't cleaning up properly. 
+def create_tcpserver(): + # Connection might not work on first attempt + for i in range(5): + try: + ser = moxaSerial.Serial_TCPServer(('127.0.0.1', 9001), 0.1) + break + except ConnectionRefusedError: + print("Could not connect, waiting and trying again.") + time.sleep(1) + return ser + + +@pytest.mark.integtest +def test_moxaserial_create_serial_tcpserver(tcp_emulator): + create_tcpserver() + + +@pytest.mark.integtest +def test_moxaserial_write(tcp_emulator): + ser = create_tcpserver() + ser.write('ping') + + +@pytest.mark.integtest +def test_moxaserial_writeread(tcp_emulator): + ser = create_tcpserver() + response = ser.writeread('ping') + assert response == 'pong' + + +@pytest.mark.integtest +def test_moxaserial_write_readline(tcp_emulator): + ser = create_tcpserver() + ser.write('ping') + assert ser.readline() == 'pong\r' diff --git a/tests/agent/test_pmx.py b/tests/agent/test_pmx.py new file mode 100644 index 000000000..5717e0950 --- /dev/null +++ b/tests/agent/test_pmx.py @@ -0,0 +1,152 @@ +import time +import pytest + +from socs.agent.pmx import Command, PMX +from socs.testing.device_emulator import create_device_emulator + + +tcp_emulator = create_device_emulator({'ping': 'pong\r', + 'SYST:REM': 'test'}, + 'tcp', 9001) + + +# Tried this as a fixture, but connections weren't cleaning up properly. 
+def create_command(): + # Connection might not work on first attempt + for i in range(5): + try: + pmx = PMX(tcp_ip='127.0.0.1', tcp_port=9001, timeout=0.1) + cmd = Command(pmx) + break + except ConnectionRefusedError: + print("Could not connect, waiting and trying again.") + time.sleep(1) + return cmd + + +@pytest.mark.integtest +def test_pmx_create_command(tcp_emulator): + create_command() + + +@pytest.mark.integtest +def test_pmx_destructor(tcp_emulator): + cmd = create_command() + del cmd + + +@pytest.mark.integtest +def test_pmx_cmd_on(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'OUTP ON': '', + 'OUTP?': 'on'}) + + cmd.user_input('on') + + +@pytest.mark.integtest +def test_pmx_cmd_off(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'OUTP OFF': '', + 'OUTP?': 'off'}) + + cmd.user_input('off') + + +@pytest.mark.integtest +def test_pmx_cmd_set_voltage(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'VOLT 1': '', + 'VOLT?': '1'}) + + cmd.user_input('V 1') + + +@pytest.mark.integtest +def test_pmx_cmd_set_current(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'CURR 1': '', + 'CURR?': '1'}) + + cmd.user_input('C 1') + + +@pytest.mark.integtest +def test_pmx_cmd_set_voltage_limit(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'VOLT:PROT 1': '', + 'VOLT:PROT?': '1'}) + + cmd.user_input('VL 1') + + +@pytest.mark.integtest +def test_pmx_cmd_set_current_limit(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'CURR:PROT 1': '', + 'CURR:PROT?': '1'}) + + cmd.user_input('CL 1') + + +@pytest.mark.integtest +def test_pmx_cmd_use_external_voltage(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'VOLT:EXT:SOUR VOLT': '', + 'VOLT:EXT:SOUR?': 'source_name'}) + + assert 'source_name' in cmd.user_input('U') + + +@pytest.mark.integtest +def test_pmx_cmd_ignore_external_voltage(tcp_emulator): + cmd = 
create_command() + tcp_emulator.define_responses({'VOLT:EXT:SOUR NONE': '', + 'VOLT:EXT:SOUR?': 'False'}) + + assert 'False' in cmd.user_input('I') + + +@pytest.mark.integtest +def test_pmx_cmd_check_voltage(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'MEAS:VOLT?': '1'}) + + msg, val = cmd.user_input('V?') + assert val == 1 + + +@pytest.mark.integtest +def test_pmx_cmd_check_current(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'MEAS:CURR?': '1'}) + + msg, val = cmd.user_input('C?') + assert val == 1 + + +@pytest.mark.integtest +def test_pmx_cmd_check_voltage_current(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'MEAS:VOLT?': '2', + 'MEAS:CURR?': '1'}) + + volt, curr = cmd.user_input('VC?') + assert volt == 2 + assert curr == 1 + + +@pytest.mark.integtest +def test_pmx_cmd_check_output(tcp_emulator): + cmd = create_command() + tcp_emulator.define_responses({'OUTP?': '0'}) + msg, val = cmd.user_input('O?') + assert val == 0 + + tcp_emulator.define_responses({'OUTP?': '1'}) + msg, val = cmd.user_input('O?') + assert val == 1 + + tcp_emulator.define_responses({'OUTP?': '2'}) + msg, val = cmd.user_input('O?') + assert val == 2 diff --git a/tests/agents/hwp_rotation/test_pid_controller.py b/tests/agents/hwp_rotation/test_pid_controller.py new file mode 100644 index 000000000..14e12ea90 --- /dev/null +++ b/tests/agents/hwp_rotation/test_pid_controller.py @@ -0,0 +1,50 @@ +import sys +sys.path.insert(0, '../agents/hwp_rotation/') +from src.pid_controller import PID + +from socs.testing.device_emulator import create_device_emulator + +pid_emu = create_device_emulator( + {'*W02400000': 'W02\r'}, relay_type='tcp', port=3003) + + +def test_send_message(pid_emu): + pid = PID('127.0.0.1', 3003) + responses = {'ping': 'pong'} + pid_emu.define_responses(responses) + resp = pid.send_message('ping') + assert resp == 'pong' + + +def test_get_direction(pid_emu): + pid = PID('127.0.0.1', 3003) + 
responses = {'*R02': 'R02400000\r'} + pid_emu.define_responses(responses) + pid.get_direction() + + +def test_set_direction(pid_emu): + pid = PID('127.0.0.1', 3003) + responses = {"*W02400000": 'W02\r'} + pid_emu.define_responses(responses) + pid.set_direction('0') + + +def test_decode_read(): + print(PID._decode_read('R02400000')) + + +def test_decode_read_unknown(): + print(PID._decode_read('R03400000')) + + +def test_decode_write(): + print(PID._decode_write('W02')) + + +def test_decode_array(): + print(PID._decode_array(['R02400000'])) + + +def test_decode_measure_unknown(): + assert PID._decode_measure(['R02400000']) == 9.999 diff --git a/tests/agents/test_pysmurf_archiver.py b/tests/agents/test_pysmurf_archiver.py deleted file mode 100644 index 26668df97..000000000 --- a/tests/agents/test_pysmurf_archiver.py +++ /dev/null @@ -1,3 +0,0 @@ -import sys -sys.path.insert(0, '../agents/pysmurf_archiver/') -from pysmurf_archiver_agent import PysmurfArchiverAgent diff --git a/tests/agents/test_pysmurf_controller.py b/tests/agents/test_pysmurf_controller.py deleted file mode 100644 index f01f81df1..000000000 --- a/tests/agents/test_pysmurf_controller.py +++ /dev/null @@ -1,3 +0,0 @@ -import sys -sys.path.insert(0, '../agents/pysmurf_controller/') -from pysmurf_controller import PysmurfController diff --git a/tests/agents/test_smurf_file_emulator.py b/tests/agents/test_smurf_file_emulator.py index 000b6d53d..81d75aa8a 100644 --- a/tests/agents/test_smurf_file_emulator.py +++ b/tests/agents/test_smurf_file_emulator.py @@ -1,8 +1,8 @@ import sys sys.path.insert(0, '../agents/smurf_file_emulator/') import os -from argparse import Namespace -from smurf_file_emulator import SmurfFileEmulator +from smurf_file_emulator import SmurfFileEmulator, make_parser + from ocs.ocs_agent import OpSession @@ -12,25 +12,27 @@ import txaio txaio.use_twisted() -def create_agent(base_dir, file_duration=10): +def create_agent(base_dir, file_duration=10, frame_len=2, + nchans=1024): """Test 
fixture to setup a mocked OCSAgent.""" mock_agent = mock.MagicMock() log = txaio.make_logger() txaio.start_logging(level='debug') mock_agent.log = log log.info('Initialized mock OCSAgent') - args = Namespace( - stream_id='test_em', base_dir=base_dir, - stream_on_start=False, file_duration=file_duration - ) - + parser = make_parser() + args = parser.parse_args(args=[ + '--stream-id', 'test_em', '--base-dir', base_dir, + '--file-duration', str(file_duration), '--frame-len', str(frame_len) + ]) agent = SmurfFileEmulator(mock_agent, args) return agent def test_tune_up(tmp_path): emulator = create_agent(str(tmp_path)) session = mock.MagicMock() - emulator.tune_dets(session) + session.data = {} + emulator.tune_dets(session, {'test_mode': True}) def test_take_iv(tmp_path): emulator = create_agent(str(tmp_path)) @@ -47,7 +49,9 @@ def test_bias_dets(tmp_path): session = mock.MagicMock() emulator.bias_dets(session) + def test_stream(tmp_path): - emulator = create_agent(str(tmp_path), file_duration=0.2) + emulator = create_agent(str(tmp_path), file_duration=0.2, frame_len=0.05) session = mock.MagicMock() + session.data ={} emulator.stream(session, params={'duration': 1}) diff --git a/tests/agents/test_smurf_recorder.py b/tests/agents/test_smurf_recorder.py deleted file mode 100644 index e066c2958..000000000 --- a/tests/agents/test_smurf_recorder.py +++ /dev/null @@ -1,302 +0,0 @@ -import time -from unittest.mock import MagicMock - -import pytest - -try: - import so3g - from spt3g import core - - from socs.agent.smurf_recorder import FrameRecorder, FlowControl -except ModuleNotFoundError as e: - print(f"Unable to import spt3g: {e}") - -@pytest.mark.spt3g -@pytest.mark.dependency(name="spt3g") -def test_spt3g_import(): - """Test that we can import spt3g. 
Used to skip dependent tests.""" - from spt3g import core - - # Just to avoid flake8 complaining we aren't using these imports - print(core.__file__) - - -@pytest.fixture -def frame_recorder(tmpdir): - p = tmpdir.mkdir("data") - record = FrameRecorder(10, "tcp://127.0.0.1:4536", p, 'test_id') - return record - - -@pytest.fixture -def networksender(): - networksender = core.G3NetworkSender(hostname="*", port=4536) - return networksender - - -# test basic initialization -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -def test_frame_recorder_init(frame_recorder): - pass - - -# Test reader connection -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestReaderConnection(): - def test_g3reader_connection(self, frame_recorder, networksender): - """Test we can establish a connection with a G3NetworkSender.""" - frame_recorder._establish_reader_connection(timeout=1) - networksender.Close() - networksender = None - - def test_failed_g3reader_connection(self, frame_recorder): - """Without a backend setup this will fail, though we catch the - RuntimeError, so it should pass anyway. - - """ - frame_recorder._establish_reader_connection(timeout=1) - - def test_rapid_reader_connection(self, frame_recorder): - """Without a backend setup this will fail, though we catch the - RuntimeError, so it should pass anyway. - - """ - # Fake our last connection time, this should make the check of - # t_diff < 1 true - frame_recorder.last_connection_time = time.time() - frame_recorder._establish_reader_connection(timeout=1) - - -# test split_acquisition -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestSplitAcquisition(): - """Splitting an acquisition is time based, and rotates the file we're - writing to. This class tests various scenarios with this file splitting. 
- - """ - def test_null_writer_on_split(self, frame_recorder): - """If writer is None we should just return.""" - frame_recorder.split_acquisition() - - def test_split(self, frame_recorder): - """We should split if the current time - last file start time is - greater than the time_per_file set for the frame recorder. - - In this test suite we've set a 10 second time_per_file. Here we mock a - writer to get passed the writer is None check and force our start_time - to be well enough in the past to get a file split. - - """ - frame_recorder.writer = MagicMock() - frame_recorder.start_time = time.time() - 20 - frame_recorder.split_acquisition() - - assert frame_recorder.filename_suffix == 1 - assert frame_recorder.writer is None - - -# test close_file -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -def test_close_file(frame_recorder): - """Test closing out the file and removing the writer.""" - frame_recorder.writer = MagicMock() - frame_recorder.filename_suffix = 1 - frame_recorder.close_file() - - assert frame_recorder.writer is None - assert frame_recorder.filename_suffix == 0 - - -# test check_for_frame_gap -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestFrameGap(): - def test_null_writer_check_for_frame_gap(self, frame_recorder): - """If writer is None return.""" - frame_recorder.check_for_frame_gap() - - def test_check_for_frame_gap(self, frame_recorder): - """Mock a writer and test that we detect the gap and close the file.""" - frame_recorder.writer = MagicMock() - frame_recorder.last_frame_write_time = time.time() - 10 - frame_recorder.check_for_frame_gap() - - assert frame_recorder.writer is None - - -# test create_new_file -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestCreateNewFile(): - def test_create_new_file(self, frame_recorder): - """Writer should be None to create a new file.""" - frame_recorder.create_new_file() - - assert frame_recorder.writer is not None - assert 
frame_recorder.start_time is not None - - def test_rapid_create_new_file(self, frame_recorder): - # Force a quick file creation and remove the writer - frame_recorder.create_new_file() - frame_recorder.writer = None - - frame_recorder.create_new_file() - - assert frame_recorder.filename_suffix == 1 - - def test_write_last_meta(self, frame_recorder): - """If last_meta exists, write it to file.""" - f = core.G3Frame(core.G3FrameType.Observation) - f['session_id'] = 0 - f['start_time'] = time.time() - - frame_recorder.last_meta = f - frame_recorder.create_new_file() - - -# test write_frames_to_file -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestWriteFrames(): - def test_write_flowcontrol_frame(self, frame_recorder): - """Flow control frames should log a warning and continue.""" - f = core.G3Frame(core.G3FrameType.none) - f['sostream_flowcontrol'] = FlowControl.START.value - - frame_recorder.frames = [f] - frame_recorder.write_frames_to_file() - - assert frame_recorder.frames == [] - - def test_write_obs_frame(self, frame_recorder): - """Test writing an observation frame to file.""" - f = core.G3Frame(core.G3FrameType.Observation) - f['session_id'] = 0 - f['start_time'] = time.time() - - filepath = frame_recorder.create_new_file() - frame_recorder.frames = [f] - frame_recorder.write_frames_to_file() - - # Check the frame we wrote - frames = [fr for fr in core.G3File(filepath)] - assert len(frames) == 1 - assert frames[0]['session_id'] == 0 - assert frames[0].type is core.G3FrameType.Observation - assert frame_recorder.last_meta is f - - -# test read_frames -@pytest.mark.spt3g -@pytest.mark.dependency(depends=["spt3g"]) -class TestReadFrames(): - def test_failed_g3reader_connection(self, frame_recorder): - """Should just return if we can't get a connection. - - """ - frame_recorder.read_frames(1) - - def test_g3reader_connection(self, frame_recorder, networksender): - """Test we can establish a connection with a G3NetworkSender. 
- - We aren't sending any frames, so we just make the connection, no frames - will be processed, so we timeout. - - """ - frame_recorder.read_frames(1) - networksender.Close() - networksender = None - assert frame_recorder.reader is None - - def test_start_frame_processing(self, frame_recorder): - """Start frame should close the currently open file.""" - frame_recorder.reader = MagicMock() # mock the reader - - # Make conditions so asserts would fail at end - frame_recorder.writer = MagicMock() - frame_recorder.filename_suffix = 1 - - f = core.G3Frame(core.G3FrameType.none) - f['sostream_flowcontrol'] = FlowControl.START.value - - frame_recorder.frames = [f] - frame_recorder.read_frames() - - assert frame_recorder.writer is None - assert frame_recorder.filename_suffix == 0 - - # check flow control frame discarded - assert frame_recorder.frames == [] - - def test_end_frame_processing(self, frame_recorder): - """END frame should close the currently open file, and unset the - last_meta cache. 
- - """ - frame_recorder.reader = MagicMock() # mock the reader - - # Make conditions so asserts would fail at end - frame_recorder.writer = MagicMock() - frame_recorder.filename_suffix = 1 - frame_recorder.last_meta = 1 - - f = core.G3Frame(core.G3FrameType.none) - f['sostream_flowcontrol'] = FlowControl.END.value - - frame_recorder.frames = [f] - frame_recorder.read_frames() - - assert frame_recorder.writer is None - assert frame_recorder.filename_suffix == 0 - assert frame_recorder.last_meta is None - - # check flow control frame discarded - assert frame_recorder.frames == [] - - def test_alive_frame_processing(self, frame_recorder): - """ALIVE frame should simply be discarded.""" - frame_recorder.reader = MagicMock() # mock the reader - - f = core.G3Frame(core.G3FrameType.none) - f['sostream_flowcontrol'] = FlowControl.ALIVE.value - - frame_recorder.frames = [f] - frame_recorder.read_frames() - - # check flow control frame discarded - assert frame_recorder.frames == [] - - def test_cleanse_frame_processing(self, frame_recorder): - """CLEANSE frame should simply be discarded.""" - frame_recorder.reader = MagicMock() # mock the reader - - f = core.G3Frame(core.G3FrameType.Observation) - f['sostream_flowcontrol'] = FlowControl.CLEANSE.value - - frame_recorder.frames = [f] - frame_recorder.read_frames() - - # check flow control frame discarded - assert frame_recorder.frames == [] - - def test_improper_flow_control_frames(self, frame_recorder): - """FlowControl frames are defined as having the key - 'sostream_flowcontrol' in them. Other flow control frame types that - don't contain 'sostream_flowcontrol' should be recorded, including none - type frames. 
- - """ - frame_recorder.reader = MagicMock() # mock the reader - - f = core.G3Frame(core.G3FrameType.none) - f['test'] = time.time() - - frame_recorder.frames = [f] - frame_recorder.read_frames() - - # check flow control frame not discarded - assert frame_recorder.frames != [] diff --git a/tests/default.yaml b/tests/default.yaml index 0b9680092..5af6aa848 100644 --- a/tests/default.yaml +++ b/tests/default.yaml @@ -16,7 +16,7 @@ hosts: ['--ip-address', '127.0.0.1'], ['--dwell-time-delay', 0], ['--sample-heater', False], - ['--mode', 'init']]}, + ['--mode', 'idle']]}, {'agent-class': 'Lakeshore425Agent', 'instance-id': 'LS425', 'arguments': [ @@ -31,5 +31,20 @@ hosts: ['--port', './responder'], ] }, + {'agent-class': 'PfeifferTC400Agent', + 'instance-id': 'pfeifferturboA', + 'arguments': [['--ip-address', '127.0.0.1'], + ['--port-number', '9001'], + ['--turbo-address', '1']]}, + {'agent-class': 'RotationAgent', + 'instance-id': 'rotator', + 'arguments': [['--kikusui-ip', '127.0.0.1'], + ['--kikusui-port', '2000'], + ['--pid-ip', '127.0.0.1'], + ['--pid-port', '2001'], + ['-v'], + ['--mode', 'init'], + ] + }, ] } diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml index 726386c51..9d33db309 100644 --- a/tests/docker-compose.yml +++ b/tests/docker-compose.yml @@ -8,8 +8,3 @@ services: - ./config.json:/app/crossbar/config.json environment: - PYTHONUNBUFFERED=1 - - ocs-LS372: - image: ocs-lakeshore372-simulator - ports: - - "7777:7777" diff --git a/tests/integration/test_hwp_rotation_agent_integration.py b/tests/integration/test_hwp_rotation_agent_integration.py new file mode 100644 index 000000000..e4d2af279 --- /dev/null +++ b/tests/integration/test_hwp_rotation_agent_integration.py @@ -0,0 +1,287 @@ +import pytest + +import ocs +from ocs.base import OpCode + +from ocs.testing import ( + create_agent_runner_fixture, + create_client_fixture, +) + +from integration.util import ( + create_crossbar_fixture +) + +from socs.testing.device_emulator import 
create_device_emulator + +pytest_plugins = ("docker_compose") + +wait_for_crossbar = create_crossbar_fixture() +run_agent = create_agent_runner_fixture( + '../agents/hwp_rotation/rotation_agent.py', 'hwp_rotation_agent', args=['--log-dir', './logs/']) +run_agent_idle = create_agent_runner_fixture( + '../agents/hwp_rotation/rotation_agent.py', 'hwp_rotation_agent', args=['--mode', 'idle', '--log-dir', './logs/']) +client = create_client_fixture('rotator') +kikusui_emu = create_device_emulator( + {'SYST:REM': ''}, relay_type='tcp', port=2000) +pid_emu = create_device_emulator( + {'*W02400000': 'W02\r'}, relay_type='tcp', port=2001) + + +@pytest.mark.integtest +def test_testing(wait_for_crossbar): + """Just a quick test to make sure we can bring up crossbar.""" + assert True + + +# This ends up hanging for some reason that I can't figure out at the moment. +# @pytest.mark.integtest +# def test_hwp_rotation_failed_connection_kikusui(wait_for_crossbar, pid_emu, run_agent_idle, client): +# resp = client.init_connection.start() +# print(resp) +# # We can't really check anything here, the agent's going to exit during the +# # init_conneciton task because it cannot connect to the Kikusui. + + +@pytest.mark.integtest +def test_hwp_rotation_failed_connection_pid(wait_for_crossbar, kikusui_emu, run_agent_idle, client): + resp = client.init_connection.start() + print(resp) + # We can't really check anything here, the agent's going to exit during the + # init_conneciton task because it cannot connect to the PID controller. 
+ + +@pytest.mark.integtest +def test_hwp_rotation_get_direction(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*R02': 'R02400000\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.get_direction() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + # Test when in reverse + responses = {'*W02401388': 'W02\r', + '*R02': 'R02401388\r'} + pid_emu.define_responses(responses) + + client.set_direction(direction='1') + resp = client.get_direction() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_direction(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*W02400000': 'W02\r', + '*W02401388': 'W02\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_direction(direction='0') + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + resp = client.set_direction(direction='1') + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_pid(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*W1700C8': 'W17\r', + '*W18003F': 'W18\r', + '*W190000': 'W19\r', + '*Z02': 'Z02\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_pid(p=0.2, i=63, d=0) + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_tune_stop(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, 
client): + responses = {'*W0C83': 'W0C\r', + '*W01400000': 'W01\r', + '*R01': 'R01400000\r', + '*Z02': 'Z02\r', + '*W1700C8': 'W17\r', + '*W180000': 'W18\r', + '*W190000': 'W19\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.tune_stop() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_get_freq(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*X01': 'X010.000\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.get_freq() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_scale(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*W14102710': 'W14\r', + '*W03302710': 'W03\r', + '*Z02': 'Z02\r'} + pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_scale() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_declare_freq(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + client.init_connection.wait() # wait for connection to be made + resp = client.declare_freq(freq=0) + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_tune_freq(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'*W0C81': 'W0C\r', + '*W01400000': 'W01\r', + '*R01': 'R01400000\r', + '*Z02': 'Z02\r', + '*W1700C8': 'W17\r', + '*W18003F': 'W18\r', + '*W190000': 'W19\r'} + 
pid_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.tune_freq() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_on(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'OUTP ON': '', + 'OUTP?': 'on'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_on() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_off(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'OUTP OFF': '', + 'OUTP?': 'off'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_off() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_v(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'VOLT 1.000000': '', + 'VOLT?': '1.000000'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_v(volt=1) + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_set_v_lim(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'VOLT:PROT 10.000000': '', + 'VOLT:PROT?': '10.000000'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.set_v_lim(volt=10) + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == 
OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_use_ext(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'VOLT:EXT:SOUR VOLT': '', + 'VOLT:EXT:SOUR?': 'source_name'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.use_ext() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_ign_ext(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'VOLT:EXT:SOUR NONE': '', + 'VOLT:EXT:SOUR?': 'False'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.ign_ext() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_hwp_rotation_iv_acq(wait_for_crossbar, kikusui_emu, pid_emu, run_agent, client): + responses = {'MEAS:VOLT?': '2', + 'MEAS:CURR?': '1'} + kikusui_emu.define_responses(responses) + + client.init_connection.wait() # wait for connection to be made + resp = client.iv_acq.start(test_mode=True) + assert resp.status == ocs.OK + + resp = client.iv_acq.wait(timeout=20) + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.SUCCEEDED.value diff --git a/tests/integration/test_ls372_agent_integration.py b/tests/integration/test_ls372_agent_integration.py index 02e75c53b..b98762f53 100644 --- a/tests/integration/test_ls372_agent_integration.py +++ b/tests/integration/test_ls372_agent_integration.py @@ -7,6 +7,8 @@ from integration.util import create_crossbar_fixture +from socs.testing.device_emulator import create_device_emulator + pytest_plugins = ("docker_compose") # Set the OCS_CONFIG_DIR so we read the local default.yaml file always @@ -19,6 +21,40 @@ client = create_client_fixture('LSASIM') 
wait_for_crossbar = create_crossbar_fixture() +def build_init_responses(): + values = {'*IDN?': 'LSCI,MODEL372,LSASIM,1.3', + 'SCAN?': '01,1', + 'INSET? A': '0,010,003,00,1', + 'INNAME? A': 'Input A', + 'INTYPE? A': '1,04,0,15,0,2', + 'TLIMIT? A': '+0000', + 'OUTMODE? 0': '2,6,1,0,0,001', + 'HTRSET? 0': '+120.000,8,+0000.00,1', + 'OUTMODE? 2': '4,16,1,0,0,001', + 'HTRSET? 2': '+120.000,8,+0000.00,1'} + + for i in range(1, 17): + values[f'INSET? {i}'] = '1,007,003,21,1' + values[f'INNAME? {i}'] = f'Channel {i:02}' + values[f'INTYPE? {i}'] = '0,07,1,10,0,1' + values[f'TLIMIT? {i}'] = '+0000' + + # Heaters + values.update({'RANGE? 0': '0', + 'RANGE? 2': '1', + 'STILL?': '+10.60', + 'HTR?': '+00.0005E+00'}) + + # Sensor readings + values.update({'KRDG? 1': '+293.873E+00', + 'SRDG? 1': '+108.278E+00', + 'KRDG? A': '+00.0000E-03', + 'SRDG? A': '+000.000E+09'}) + + return values + +emulator = create_device_emulator(build_init_responses(), relay_type='tcp', port=7777) + @pytest.mark.integtest def test_testing(wait_for_crossbar): @@ -27,7 +63,7 @@ def test_testing(wait_for_crossbar): @pytest.mark.integtest -def test_ls372_init_lakeshore(wait_for_crossbar, run_agent, client): +def test_ls372_init_lakeshore(wait_for_crossbar, emulator, run_agent, client): resp = client.init_lakeshore() # print(resp) assert resp.status == ocs.OK @@ -36,7 +72,7 @@ @pytest.mark.integtest -def test_ls372_enable_control_chan(wait_for_crossbar, run_agent, client): +def test_ls372_enable_control_chan(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.enable_control_chan() assert resp.status == ocs.OK @@ -44,7 +80,7 @@ @pytest.mark.integtest -def test_ls372_disable_control_chan(wait_for_crossbar, run_agent, client): +def test_ls372_disable_control_chan(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore()
resp = client.disable_control_chan() assert resp.status == ocs.OK @@ -52,7 +88,7 @@ def test_ls372_disable_control_chan(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_start_acq(wait_for_crossbar, run_agent, client): +def test_ls372_start_acq(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.acq.start(sample_heater=False, run_once=True) assert resp.status == ocs.OK @@ -65,7 +101,7 @@ def test_ls372_start_acq(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_heater_range(wait_for_crossbar, run_agent, client): +def test_ls372_set_heater_range(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_heater_range(range=1e-3, heater='sample', wait=0) assert resp.status == ocs.OK @@ -73,7 +109,7 @@ def test_ls372_set_heater_range(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_excitation_mode(wait_for_crossbar, run_agent, client): +def test_ls372_set_excitation_mode(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_excitation_mode(channel=1, mode='current') assert resp.status == ocs.OK @@ -81,7 +117,7 @@ def test_ls372_set_excitation_mode(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_excitation(wait_for_crossbar, run_agent, client): +def test_ls372_set_excitation(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_excitation(channel=1, value=1e-9) assert resp.status == ocs.OK @@ -89,17 +125,16 @@ def test_ls372_set_excitation(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_get_excitation(wait_for_crossbar, run_agent, client): +def test_ls372_get_excitation(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() - client.set_excitation(channel=1, value=1e-9) resp = client.get_excitation(channel=1) assert resp.status == ocs.OK assert 
resp.session['op_code'] == OpCode.SUCCEEDED.value - assert resp.session['data']['excitation'] == 1e-9 + assert resp.session['data']['excitation'] == 2e-3 @pytest.mark.integtest -def test_ls372_set_resistance_range(wait_for_crossbar, run_agent, client): +def test_ls372_set_resistance_range(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_resistance_range(channel=1, resistance_range=2) assert resp.status == ocs.OK @@ -107,17 +142,17 @@ def test_ls372_set_resistance_range(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_get_resistance_range(wait_for_crossbar, run_agent, client): +def test_ls372_get_resistance_range(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() client.set_resistance_range(channel=1, resistance_range=2) resp = client.get_resistance_range(channel=1) assert resp.status == ocs.OK assert resp.session['op_code'] == OpCode.SUCCEEDED.value - assert resp.session['data']['resistance_range'] == 2 + assert resp.session['data']['resistance_range'] == 63.2 @pytest.mark.integtest -def test_ls372_set_dwell(wait_for_crossbar, run_agent, client): +def test_ls372_set_dwell(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_dwell(channel=1, dwell=3) assert resp.status == ocs.OK @@ -125,17 +160,16 @@ def test_ls372_set_dwell(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_get_dwell(wait_for_crossbar, run_agent, client): +def test_ls372_get_dwell(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() - client.set_dwell(channel=1, dwell=3) resp = client.get_dwell(channel=1) assert resp.status == ocs.OK assert resp.session['op_code'] == OpCode.SUCCEEDED.value - assert resp.session['data']['dwell_time'] == 3 + assert resp.session['data']['dwell_time'] == 7 @pytest.mark.integtest -def test_ls372_set_pid(wait_for_crossbar, run_agent, client): +def test_ls372_set_pid(wait_for_crossbar, emulator, 
run_agent, client): client.init_lakeshore() resp = client.set_pid(P=40, I=2, D=0) assert resp.status == ocs.OK @@ -143,7 +177,7 @@ def test_ls372_set_pid(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_active_channel(wait_for_crossbar, run_agent, client): +def test_ls372_set_active_channel(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_active_channel(channel=1) assert resp.status == ocs.OK @@ -151,7 +185,7 @@ def test_ls372_set_active_channel(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_autoscan(wait_for_crossbar, run_agent, client): +def test_ls372_set_autoscan(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_autoscan(autoscan=True) assert resp.status == ocs.OK @@ -159,7 +193,7 @@ def test_ls372_set_autoscan(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_output_mode(wait_for_crossbar, run_agent, client): +def test_ls372_set_output_mode(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_output_mode(heater='still', mode='Off') assert resp.status == ocs.OK @@ -167,7 +201,7 @@ def test_ls372_set_output_mode(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_heater_output(wait_for_crossbar, run_agent, client): +def test_ls372_set_heater_output(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_heater_output(heater='still', output=50) assert resp.status == ocs.OK @@ -175,7 +209,7 @@ def test_ls372_set_heater_output(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_set_still_output(wait_for_crossbar, run_agent, client): +def test_ls372_set_still_output(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.set_still_output(output=50) assert resp.status == ocs.OK @@ -183,7 +217,7 @@ def 
test_ls372_set_still_output(wait_for_crossbar, run_agent, client): @pytest.mark.integtest -def test_ls372_get_still_output(wait_for_crossbar, run_agent, client): +def test_ls372_get_still_output(wait_for_crossbar, emulator, run_agent, client): client.init_lakeshore() resp = client.get_still_output() assert resp.status == ocs.OK diff --git a/tests/integration/test_pfeiffer_tc400_agent_integration.py b/tests/integration/test_pfeiffer_tc400_agent_integration.py new file mode 100644 index 000000000..312d16f96 --- /dev/null +++ b/tests/integration/test_pfeiffer_tc400_agent_integration.py @@ -0,0 +1,188 @@ +import os +import time +import pytest + +import ocs +from ocs.base import OpCode + +from ocs.testing import ( + create_agent_runner_fixture, + create_client_fixture, +) + +from integration.util import ( + create_crossbar_fixture +) + +from socs.testing.device_emulator import create_device_emulator + +pytest_plugins = ("docker_compose") + +# Set the OCS_CONFIG_DIR so we read the local default.yaml file always +os.environ['OCS_CONFIG_DIR'] = os.getcwd() + + +def chksum_msg(msg): + """Create and append the checksum to a message.""" + msg += "{:03d}\r".format(sum([ord(x) for x in msg]) % 256) + return msg + + +def format_reply(data): + """Produce full message string we expect back from the TC400, provided only + the data segment. + + The driver code reads the response, and only inspects the data section of + the telegram. This function is meant to make preparing the responses a bit + easier, by putting in something somewhat sensible for the rest of the + telegram. + + Parameters: + data (str): Data string to package into telegram. + + Returns: + str: The full telegram string that is emulating the response from the + TC400.
+ + """ + return chksum_msg('001' + '10' + '010' + '{:02d}'.format(len(data)) + data) + + +wait_for_crossbar = create_crossbar_fixture() +run_agent = create_agent_runner_fixture( + '../agents/pfeiffer_tc400/pfeiffer_tc400_agent.py', 'tc400_agent') +client = create_client_fixture('pfeifferturboA') +emulator = create_device_emulator({}, relay_type='tcp') + + +@pytest.mark.integtest +def test_pfeiffer_tc400_init(wait_for_crossbar, emulator, run_agent, + client): + resp = client.init() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_on(wait_for_crossbar, emulator, run_agent, + client): + client.init() + + responses = {'0011001006111111015': format_reply('111111'), # ready_turbo() + '0011002306111111019': format_reply('111111'), # turn_turbo_motor_on() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_on() + print(resp) + assert resp.status == ocs.OK + print(resp.session) + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_on_not_ready(wait_for_crossbar, emulator, + run_agent, client): + client.init() + + responses = {'0011001006111111015': format_reply('000000'), # ready_turbo() + '0011002306111111019': format_reply('111111'), # turn_turbo_motor_on() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_on() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.FAILED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_on_failed(wait_for_crossbar, emulator, + run_agent, client): + client.init() + + responses = {'0011001006111111015': format_reply('111111'), # ready_turbo() + '0011002306111111019': format_reply('000000'), # turn_turbo_motor_on() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_on() + assert resp.status == ocs.OK + assert 
resp.session['op_code'] == OpCode.FAILED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_off(wait_for_crossbar, emulator, run_agent, + client): + client.init() + + responses = {'0011002306000000013': format_reply('111111'), # turn_turbo_motor_on() + '0011001006000000009': format_reply('111111'), # unready_turbo() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_off() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_off_failed(wait_for_crossbar, emulator, + run_agent, client): + client.init() + + responses = {'0011002306000000013': format_reply('000000'), # turn_turbo_motor_on() + '0011001006000000009': format_reply('111111'), # unready_turbo() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_off() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.FAILED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_turn_turbo_off_failed_unready(wait_for_crossbar, + emulator, run_agent, + client): + client.init() + + responses = {'0011002306000000013': format_reply('111111'), # turn_turbo_motor_on() + '0011001006000000009': format_reply('000000'), # unready_turbo() + } + emulator.define_responses(responses) + + resp = client.turn_turbo_off() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.FAILED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_acknowledge_turbo_errors(wait_for_crossbar, emulator, + run_agent, client): + client.init() + + responses = {'0011000906111111023': format_reply('111111'), # acknowledge_turbo_errors() + } + emulator.define_responses(responses) + + resp = client.acknowledge_turbo_errors() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.SUCCEEDED.value + + +@pytest.mark.integtest +def test_pfeiffer_tc400_acq(wait_for_crossbar, emulator, run_agent, client): + client.init() + + responses = 
{'0010034602=?108': format_reply('000300'), # get_turbo_motor_temperature() + '0010030902=?107': format_reply('000800'), # get_turbo_actual_rotation_speed() + '0010030302=?101': format_reply('Err001'), # get_turbo_error_code() + } + emulator.define_responses(responses) + + resp = client.acq.start(test_mode=True, wait=0) + resp = client.acq.wait() + assert resp.status == ocs.OK + assert resp.session['op_code'] == OpCode.SUCCEEDED.value