diff --git a/fpga/common/tb/tx_scheduler_rr/Makefile b/fpga/common/tb/tx_scheduler_rr/Makefile new file mode 100644 index 000000000..1c233759b --- /dev/null +++ b/fpga/common/tb/tx_scheduler_rr/Makefile @@ -0,0 +1,61 @@ +# SPDX-License-Identifier: BSD-2-Clause-Views +# Copyright (c) 2020-2023 The Regents of the University of California + +TOPLEVEL_LANG = verilog + +SIM ?= icarus +WAVES ?= 0 + +COCOTB_HDL_TIMEUNIT = 1ns +COCOTB_HDL_TIMEPRECISION = 1ps + +DUT = tx_scheduler_rr +TOPLEVEL = $(DUT) +MODULE = test_$(DUT) +VERILOG_SOURCES += ../../rtl/$(DUT).v +VERILOG_SOURCES += ../../lib/axis/rtl/axis_fifo.v +VERILOG_SOURCES += ../../lib/axis/rtl/priority_encoder.v + +# module parameters +export PARAM_AXIL_DATA_WIDTH := 32 +export PARAM_AXIL_ADDR_WIDTH := 16 +export PARAM_AXIL_STRB_WIDTH := $(shell expr $(PARAM_AXIL_DATA_WIDTH) / 8 ) +export PARAM_LEN_WIDTH := 16 +export PARAM_REQ_TAG_WIDTH := 8 +export PARAM_OP_TABLE_SIZE := 16 +export PARAM_QUEUE_INDEX_WIDTH := 6 +export PARAM_PIPELINE := 2 +export PARAM_SCHED_CTRL_ENABLE := 1 + +ifeq ($(SIM), icarus) + PLUSARGS += -fst + + COMPILE_ARGS += $(foreach v,$(filter PARAM_%,$(.VARIABLES)),-P $(TOPLEVEL).$(subst PARAM_,,$(v))=$($(v))) + + ifeq ($(WAVES), 1) + VERILOG_SOURCES += iverilog_dump.v + COMPILE_ARGS += -s iverilog_dump + endif +else ifeq ($(SIM), verilator) + COMPILE_ARGS += -Wno-SELRANGE -Wno-WIDTH + + COMPILE_ARGS += $(foreach v,$(filter PARAM_%,$(.VARIABLES)),-G$(subst PARAM_,,$(v))=$($(v))) + + ifeq ($(WAVES), 1) + COMPILE_ARGS += --trace-fst + endif +endif + +include $(shell cocotb-config --makefiles)/Makefile.sim + +iverilog_dump.v: + echo 'module iverilog_dump();' > $@ + echo 'initial begin' >> $@ + echo ' $$dumpfile("$(TOPLEVEL).fst");' >> $@ + echo ' $$dumpvars(0, $(TOPLEVEL));' >> $@ + echo 'end' >> $@ + echo 'endmodule' >> $@ + +clean:: + @rm -rf iverilog_dump.v + @rm -rf dump.fst $(TOPLEVEL).fst diff --git a/fpga/common/tb/tx_scheduler_rr/test_tx_scheduler_rr.py b/fpga/common/tb/tx_scheduler_rr/test_tx_scheduler_rr.py new file mode 100644 index 000000000..05c550604 --- /dev/null +++ b/fpga/common/tb/tx_scheduler_rr/test_tx_scheduler_rr.py @@ -0,0 +1,380 @@ +#!/usr/bin/env python +# SPDX-License-Identifier: BSD-2-Clause-Views +# Copyright (c) 2020-2023 The Regents of the University of California + +import itertools +import logging +import os +import struct + +import scapy.utils +from scapy.layers.l2 import Ether +from scapy.layers.inet import IP, UDP + +import cocotb_test.simulator +import pytest + +import cocotb +from cocotb.clock import Clock +from cocotb.triggers import RisingEdge +from cocotb.regression import TestFactory + +from cocotbext.axi import AxiLiteBus, AxiLiteMaster +from cocotbext.axi.stream import define_stream + + +TxReqBus, TxReqTransaction, TxReqSource, TxReqSink, TxReqMonitor = define_stream("TxReq", + signals=["queue", "tag", "valid"], + optional_signals=["ready"] +) + + +TxReqStatusBus, TxReqStatusTransaction, TxReqStatusSource, TxReqStatusSink, TxReqStatusMonitor = define_stream("TxReqStatus", + signals=["len", "tag", "valid"], + optional_signals=["ready"] +) + + +DoorbellBus, DoorbellTransaction, DoorbellSource, DoorbellSink, DoorbellMonitor = define_stream("Doorbell", + signals=["queue", "valid"], + optional_signals=["ready"] +) + + +CtrlBus, CtrlTransaction, CtrlSource, CtrlSink, CtrlMonitor = define_stream("Ctrl", + signals=["queue", "enable", "valid"], + optional_signals=["ready"] +) + + +class TB(object): + def __init__(self, dut): + self.dut = dut + + self.log = logging.getLogger("cocotb.tb") + 
self.log.setLevel(logging.DEBUG) + + cocotb.start_soon(Clock(dut.clk, 4, units="ns").start()) + + self.tx_req_sink = TxReqSink(TxReqBus.from_prefix(dut, "m_axis_tx_req"), dut.clk, dut.rst) + self.tx_req_status_source = TxReqStatusSource(TxReqStatusBus.from_prefix(dut, "s_axis_tx_req_status"), dut.clk, dut.rst) + + self.doorbell_source = DoorbellSource(DoorbellBus.from_prefix(dut, "s_axis_doorbell"), dut.clk, dut.rst) + + self.ctrl_source = CtrlSource(CtrlBus.from_prefix(dut, "s_axis_sched_ctrl"), dut.clk, dut.rst) + + self.axil_master = AxiLiteMaster(AxiLiteBus.from_prefix(dut, "s_axil"), dut.clk, dut.rst) + + dut.enable.setimmediatevalue(0) + + def set_idle_generator(self, generator=None): + if generator: + self.tx_req_status_source.set_pause_generator(generator()) + + def set_backpressure_generator(self, generator=None): + if generator: + self.tx_req_sink.set_pause_generator(generator()) + + async def reset(self): + self.dut.rst.setimmediatevalue(0) + await RisingEdge(self.dut.clk) + await RisingEdge(self.dut.clk) + self.dut.rst.value = 1 + await RisingEdge(self.dut.clk) + await RisingEdge(self.dut.clk) + self.dut.rst.value = 0 + await RisingEdge(self.dut.clk) + await RisingEdge(self.dut.clk) + + +async def run_test_config(dut): + + tb = TB(dut) + + await tb.reset() + + assert await tb.axil_master.read_dword(0*4) == 0 + + await tb.axil_master.write_dword(0*4, 3) + + assert await tb.axil_master.read_dword(0*4) == 3 + + await RisingEdge(dut.clk) + await RisingEdge(dut.clk) + + +async def run_test_single(dut, idle_inserter=None, backpressure_inserter=None): + + tb = TB(dut) + + await tb.reset() + + tb.set_idle_generator(idle_inserter) + tb.set_backpressure_generator(backpressure_inserter) + + dut.enable.value = 1 + + await tb.axil_master.write_dword(0*4, 3) + + await tb.doorbell_source.send(DoorbellTransaction(queue=0)) + + for k in range(200): + await RisingEdge(dut.clk) + + for k in range(10): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=1000, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(200): + await RisingEdge(dut.clk) + + while not tb.tx_req_sink.empty(): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + await RisingEdge(dut.clk) + await RisingEdge(dut.clk) + + +async def run_test_multiple(dut, idle_inserter=None, backpressure_inserter=None): + + tb = TB(dut) + + await tb.reset() + + tb.set_idle_generator(idle_inserter) + tb.set_backpressure_generator(backpressure_inserter) + + dut.enable.value = 1 + + for k in range(10): + await tb.axil_master.write_dword(k*4, 3) + + for k in range(10): + await tb.doorbell_source.send(DoorbellTransaction(queue=k)) + + for k in range(200): + await RisingEdge(dut.clk) + + for k in range(100): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == k % 10 + + status = TxReqStatusTransaction(len=1000, tag=tx_req.tag) + tb.log.info("TX request status: %s", 
status) + await tb.tx_req_status_source.send(status) + + for k in range(10): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(200): + await RisingEdge(dut.clk) + + while not tb.tx_req_sink.empty(): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + await RisingEdge(dut.clk) + await RisingEdge(dut.clk) + + +async def run_test_doorbell(dut, idle_inserter=None, backpressure_inserter=None): + + tb = TB(dut) + + await tb.reset() + + tb.set_idle_generator(idle_inserter) + tb.set_backpressure_generator(backpressure_inserter) + + dut.enable.value = 1 + + await tb.axil_master.write_dword(0*4, 3) + + await tb.doorbell_source.send(DoorbellTransaction(queue=0)) + + for k in range(200): + await RisingEdge(dut.clk) + + for k in range(10): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=1000, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(200): + await RisingEdge(dut.clk) + + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + await tb.doorbell_source.send(DoorbellTransaction(queue=0)) + + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(10): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=1000, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(200): + await RisingEdge(dut.clk) + + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + for k in range(200): + await RisingEdge(dut.clk) + + while not tb.tx_req_sink.empty(): + tx_req = await tb.tx_req_sink.recv() + tb.log.info("TX request: %s", tx_req) + + assert tx_req.queue == 0 + + status = TxReqStatusTransaction(len=0, tag=tx_req.tag) + tb.log.info("TX request status: %s", status) + await tb.tx_req_status_source.send(status) + + await RisingEdge(dut.clk) + await RisingEdge(dut.clk) + + +def cycle_pause(): + return itertools.cycle([1, 1, 1, 0]) + + +if cocotb.SIM_NAME: + + factory = TestFactory(run_test_config) + factory.generate_tests() + + for test in [ + run_test_single, + run_test_multiple, + run_test_doorbell + ]: + + factory = 
TestFactory(test) + factory.add_option("idle_inserter", [None, cycle_pause]) + factory.add_option("backpressure_inserter", [None, cycle_pause]) + factory.generate_tests() + + +# cocotb-test + +tests_dir = os.path.dirname(__file__) +rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl')) +lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib')) +axi_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axi', 'rtl')) +axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axis', 'rtl')) +eth_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'eth', 'rtl')) +pcie_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'pcie', 'rtl')) + + +def test_tx_scheduler_rr(request): + dut = "tx_scheduler_rr" + module = os.path.splitext(os.path.basename(__file__))[0] + toplevel = dut + + verilog_sources = [ + os.path.join(rtl_dir, f"{dut}.v"), + os.path.join(axis_rtl_dir, "axis_fifo.v"), + os.path.join(axis_rtl_dir, "priority_encoder.v"), + ] + + parameters = {} + + parameters['AXIL_DATA_WIDTH'] = 32 + parameters['AXIL_ADDR_WIDTH'] = 16 + parameters['AXIL_STRB_WIDTH'] = parameters['AXIL_DATA_WIDTH'] // 8 + parameters['LEN_WIDTH'] = 16 + parameters['REQ_TAG_WIDTH'] = 8 + parameters['OP_TABLE_SIZE'] = 16 + parameters['QUEUE_INDEX_WIDTH'] = 6 + parameters['PIPELINE'] = 2 + parameters['SCHED_CTRL_ENABLE'] = 1 + + extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()} + + sim_build = os.path.join(tests_dir, "sim_build", + request.node.name.replace('[', '-').replace(']', '')) + + cocotb_test.simulator.run( + python_search=[tests_dir], + verilog_sources=verilog_sources, + toplevel=toplevel, + module=module, + parameters=parameters, + sim_build=sim_build, + extra_env=extra_env, + )
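
Note: the testbench instantiates a CtrlSource on the s_axis_sched_ctrl interface, and the Makefile builds the DUT with SCHED_CTRL_ENABLE=1, but none of the tests above actually drive that interface. A test along the following lines could cover it. This is only a sketch under stated assumptions: it presumes that enable=0 on the control stream stops scheduling for the addressed queue and that enable=1 followed by a doorbell resumes it; the exact semantics should be confirmed against tx_scheduler_rr.v before relying on the assertions. The coroutine name run_test_sched_ctrl is hypothetical and not part of the patch above.

# Hypothetical addition to test_tx_scheduler_rr.py (not part of the patch above).
# Exercises the s_axis_sched_ctrl interface, which the existing tests leave idle.
async def run_test_sched_ctrl(dut, idle_inserter=None, backpressure_inserter=None):

    tb = TB(dut)

    await tb.reset()

    tb.set_idle_generator(idle_inserter)
    tb.set_backpressure_generator(backpressure_inserter)

    dut.enable.value = 1

    # enable queue 0 through the AXI-lite register interface and ring its doorbell
    await tb.axil_master.write_dword(0*4, 3)
    await tb.doorbell_source.send(DoorbellTransaction(queue=0))

    # drain one request so the queue is known to be scheduling
    tx_req = await tb.tx_req_sink.recv()
    assert tx_req.queue == 0
    await tb.tx_req_status_source.send(TxReqStatusTransaction(len=1000, tag=tx_req.tag))

    # disable queue 0 via the scheduler control stream (assumption: enable=0
    # stops scheduling for that queue), then complete any requests already in flight
    await tb.ctrl_source.send(CtrlTransaction(queue=0, enable=0))

    for k in range(200):
        await RisingEdge(dut.clk)

    while not tb.tx_req_sink.empty():
        tx_req = await tb.tx_req_sink.recv()
        await tb.tx_req_status_source.send(TxReqStatusTransaction(len=1000, tag=tx_req.tag))

    for k in range(200):
        await RisingEdge(dut.clk)

    # with the queue disabled, no further requests should have been issued
    assert tb.tx_req_sink.empty()

    # re-enable the queue and ring the doorbell again; scheduling should resume
    await tb.ctrl_source.send(CtrlTransaction(queue=0, enable=1))
    await tb.doorbell_source.send(DoorbellTransaction(queue=0))

    tx_req = await tb.tx_req_sink.recv()
    assert tx_req.queue == 0
    await tb.tx_req_status_source.send(TxReqStatusTransaction(len=0, tag=tx_req.tag))

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)

If adopted, the coroutine would also be added to the list passed to the TestFactory loop at the bottom of the file so it is generated with the same idle_inserter/backpressure_inserter options, and it can then be run like the other tests, either via the Makefile (for example "make" or "make WAVES=1" in fpga/common/tb/tx_scheduler_rr) or through pytest and cocotb-test (pytest test_tx_scheduler_rr.py).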