Skip to content

Commit e441ce4

Browse files
committed
DO NOT MERGE: add two versions of dram stream writer
1 parent 0b86e23 commit e441ce4

File tree

1 file changed

+67
-3
lines changed

1 file changed

+67
-3
lines changed

naps/cores/dram_packet_ringbuffer/stream_if.py

Lines changed: 67 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,9 @@
33
from naps.cores import AxiReader, AxiWriter, if_none_get_zynq_hp_port, StreamInfo, LastWrapper, StreamTee
44
from naps import PacketizedStream, BasicStream, stream_transformer, StatusSignal
55

6-
__all__ = ["DramPacketRingbufferStreamWriter", "DramPacketRingbufferStreamReader"]
6+
__all__ = ["DramPacketRingbufferStreamWriter", "DramPacketRingbufferStreamWriterV2", "DramPacketRingbufferStreamReader"]
77

8-
9-
class DramPacketRingbufferStreamWriter(Elaboratable):
8+
class DramPacketRingbufferStreamWriterV2(Elaboratable):
109
def __init__(
1110
self,
1211
input: PacketizedStream,
@@ -70,6 +69,71 @@ def elaborate(self, platform):
7069
return m
7170

7271

72+
73+
class DramPacketRingbufferStreamWriter(Elaboratable):
    """Write a packetized input stream into a ring of ``n_buffers`` fixed-size
    DRAM buffers via an AXI write port.

    Each packet (delimited by the input stream's ``last`` signal) goes into one
    buffer; on ``last`` the writer advances to the next buffer, wrapping around.
    Beats that would exceed ``max_packet_size`` stop advancing the write offset
    and are counted once per packet in ``overflowed_buffers``.
    """

    def __init__(
            self,
            input: PacketizedStream,
            max_packet_size, n_buffers, base_address=0x0f80_0000,
            axi=None,
    ):
        # max_packet_size: capacity in bytes of each ring buffer
        # n_buffers:       number of buffers in the ring
        # base_address:    DRAM address of the first buffer; buffers are laid
        #                  out contiguously at max_packet_size strides
        # axi:             AXI port to use; if None, a Zynq HP port is
        #                  requested during elaborate()
        self.max_packet_size = max_packet_size
        self.base_address = base_address
        self.n_buffers = n_buffers
        self.axi = axi

        # Plain-Python list of buffer base addresses (usable from CPU-side
        # code), plus an Array so the hardware can index it by buffer number.
        self.buffer_base_list_cpu = [base_address + max_packet_size * i for i in range(n_buffers)]
        self.buffer_base_list = Array(self.buffer_base_list_cpu)
        # Bytes written into each buffer so far.
        # NOTE(review): range(max_packet_size) represents 0..max_packet_size-1,
        # but elaborate() writes address_offset + data_bytes, which can equal
        # max_packet_size for an exactly-full buffer and would wrap — confirm
        # whether range(max_packet_size + 1) is intended.
        self.buffer_level_list = Array([Signal(range(max_packet_size), name=f'buffer{i}_level') for i in range(n_buffers)])
        self.current_write_buffer = Signal(range(n_buffers))

        # The input must carry a `last` signal so packet boundaries are visible.
        assert hasattr(input, "last")
        self.input = input

        # Diagnostic counters exposed as status signals.
        self.overflowed_buffers = StatusSignal(32)
        self.buffers_written = StatusSignal(32)

    def elaborate(self, platform):
        m = Module()

        axi = if_none_get_zynq_hp_port(self.axi, m, platform)
        # Each input beat must fit into a single AXI data beat.
        assert len(self.input.payload) <= axi.data_bits

        # Fan the input out: one copy carries the write data, the other drives
        # the address generation below, so both advance in lockstep.
        tee = m.submodules.tee = StreamTee(self.input)

        data_stream = BasicStream(self.input.payload.shape())
        m.d.comb += data_stream.connect_upstream(tee.get_output(), allow_partial=True)

        transformer_input = tee.get_output()
        address_stream = BasicStream(axi.write_address.payload.shape())
        # Byte offset of the next write within the current buffer.
        address_offset = Signal.like(axi.write_address.payload)
        # Latched once per packet that exceeds max_packet_size, so the
        # overflow counter increments only once per overflowing packet.
        is_in_overflow = Signal()
        stream_transformer(transformer_input, address_stream, m, latency=0, handle_out_of_band=False)

        with m.If(transformer_input.ready & transformer_input.valid):
            # Record how many bytes the current buffer holds after this beat.
            m.d.sync += self.buffer_level_list[self.current_write_buffer].eq(address_offset + axi.data_bytes)
            with m.If(transformer_input.last):
                # Packet finished: clear overflow state, reset the offset and
                # advance to the next buffer in the ring.
                m.d.sync += is_in_overflow.eq(0)
                next_buffer = (self.current_write_buffer + 1) % self.n_buffers
                m.d.sync += address_offset.eq(0)
                m.d.sync += self.current_write_buffer.eq(next_buffer)
                m.d.sync += self.buffers_written.eq(self.buffers_written + 1)
            with m.Else():
                with m.If((address_offset + axi.data_bytes < self.max_packet_size)):
                    m.d.sync += address_offset.eq(address_offset + axi.data_bytes)
                with m.Else():
                    # Buffer full: stop advancing the offset (further beats
                    # overwrite the last word) and count the overflow once.
                    with m.If(~is_in_overflow):
                        m.d.sync += is_in_overflow.eq(1)
                        m.d.sync += self.overflowed_buffers.eq(self.overflowed_buffers + 1)
        # Absolute DRAM address for the current beat.
        m.d.comb += address_stream.payload.eq(address_offset + self.buffer_base_list[self.current_write_buffer])

        m.submodules.writer = AxiWriter(address_stream, data_stream, axi)

        m.submodules.input_stream_info = StreamInfo(self.input)

        return m
135+
136+
73137
class DramPacketRingbufferStreamReader(Elaboratable):
74138
def __init__(self, writer: DramPacketRingbufferStreamWriter, data_width=64, length_fifo_depth=1, axi=None):
75139
self.writer = writer

0 commit comments

Comments
 (0)