@@ -25,13 +25,69 @@ use std::f64::consts::PI;
25
25
// NOTE(review): not referenced in this chunk; presumably an epsilon for
// near-zero floating-point comparisons — confirm at its use sites.
const ALMOST_ZERO: f64 = 0.000001;

// Total number of FIR filter taps. Sizes both the coefficient table
// (`[[f32; $factor]; TAPS / $factor]` in `interp_impl!`) and the
// `RollingBuffer` backing array below.
const TAPS: usize = 48;
28
/// A fixed-length array type usable as a view window into a
/// `RollingBuffer`. `SIZE` exposes the array length as an associated
/// constant so generic code can reason about it.
trait ArrayBuf<T>: std::borrow::BorrowMut<[T]> {
    /// Number of elements in the backing array.
    const SIZE: usize;
}

/// Window of 24 elements (interpolation factor 2: `TAPS / 2`).
impl<T> ArrayBuf<T> for [T; 24] {
    const SIZE: usize = 24;
}

/// Window of 12 elements (interpolation factor 4: `TAPS / 4`).
impl<T> ArrayBuf<T> for [T; 12] {
    const SIZE: usize = 12;
}
39
+
40
/// A circular buffer offering fixed-length contiguous views into its data.
///
/// This is enabled by writing every element twice: once into the primary
/// region and once more into a "shadow" region that follows it. The
/// tradeoff is writing all data twice; the gain is giving the compiler a
/// contiguous view of predictable length into the data, unlocking some
/// more optimizations.
#[derive(Clone, Debug)]
struct RollingBuffer<A, T> {
    // Backing storage: primary region plus shadow copy. Only the first
    // `2 * A::SIZE` slots are used; `new` asserts this fits in `TAPS`.
    buf: [T; TAPS],
    // Index of the logical front (newest) element. Starts at `A::SIZE`
    // in `new`, then stays in `0..A::SIZE` after the first `push_front`.
    position: usize,
    // Ties the view type `A` to the struct without storing a value of it.
    _phantom: std::marker::PhantomData<A>,
}
50
+
51
impl<A: ArrayBuf<T>, T: Default + Copy> RollingBuffer<A, T> {
    /// Creates a buffer with every slot initialized to `T::default()`.
    ///
    /// # Panics
    /// Panics if the primary region plus its shadow copy
    /// (`2 * A::SIZE` elements) would not fit into the `TAPS`-sized
    /// backing array.
    fn new() -> Self {
        // Both the primary region and its shadow must fit in `buf`.
        // This assert is what makes the unchecked indexing in
        // `push_front` (and the view in `as_ref`) stay in bounds.
        assert!(A::SIZE * 2 <= TAPS);

        let buf: [T; TAPS] = [Default::default(); TAPS];

        Self {
            buf,
            // One past the last primary index; the first `push_front`
            // decrements this into the valid range `0..A::SIZE`.
            position: A::SIZE,
            _phantom: Default::default(),
        }
    }

    /// Pushes `v` to the logical front, displacing the oldest element.
    /// Frames are stored newest-first so a window starting at
    /// `position` scans newest-to-oldest.
    #[inline(always)]
    fn push_front(&mut self, v: T) {
        // Step the front index backwards, wrapping from 0 to
        // `A::SIZE - 1`.
        if self.position == 0 {
            self.position = A::SIZE - 1;
        } else {
            self.position -= 1;
        }
        // SAFETY: after the branch above `self.position < A::SIZE`
        // (entry value is at most `A::SIZE`, set in `new`), and `new`
        // asserted `A::SIZE * 2 <= TAPS`, so both `position` and
        // `position + A::SIZE` index inside `buf` (length `TAPS`).
        unsafe {
            // Primary copy ...
            *self.buf.get_unchecked_mut(self.position) = v;
            // ... mirrored into the shadow region, so every window of
            // `A::SIZE` elements starting in the primary region is
            // contiguous in memory.
            *self.buf.get_unchecked_mut(self.position + A::SIZE) = v;
        }
    }
}
77
+
78
+ impl < A , T > AsRef < A > for RollingBuffer < A , T > {
79
+ #[ inline( always) ]
80
+ fn as_ref ( & self ) -> & A {
81
+ unsafe { std:: mem:: transmute ( self . buf . get_unchecked ( self . position ) ) }
82
+ }
83
+ }
84
+
28
85
macro_rules! interp_impl {
29
86
( $name: ident, $factor: expr ) => {
30
87
#[ derive( Debug , Clone ) ]
31
88
pub struct $name<F : FrameAccumulator > {
32
89
filter: [ [ f32 ; $factor] ; ( TAPS / $factor) ] ,
33
- buffer: [ F ; ( TAPS / $factor) ] ,
34
- buffer_pos: usize ,
90
+ buffer: RollingBuffer <[ F ; TAPS / $factor] , F >,
35
91
}
36
92
37
93
impl <F > Default for $name<F >
@@ -72,40 +128,29 @@ macro_rules! interp_impl {
72
128
73
129
Self {
74
130
filter,
75
- buffer: Default :: default ( ) ,
76
- buffer_pos: ( TAPS / $factor) - 1 ,
131
+ buffer: RollingBuffer :: new( ) ,
77
132
}
78
133
}
79
134
80
135
pub fn push( & mut self , frame: F ) -> [ F ; $factor] {
81
136
// Write in Frames in reverse, to enable forward-scanning with filter
82
- self . buffer_pos = ( self . buffer_pos + self . buffer. len( ) - 1 ) % self . buffer. len( ) ;
83
- self . buffer[ self . buffer_pos] = frame;
137
+ self . buffer. push_front( frame) ;
84
138
85
139
let mut output: [ F ; $factor] = Default :: default ( ) ;
86
140
87
- let mut filterp = 0 ;
141
+ let buf = self . buffer . as_ref ( ) ;
88
142
89
- for input_frame in & self . buffer[ self . buffer_pos..] {
90
- let filter_coeffs = & self . filter[ filterp] ;
91
- for ( output_frame, coeff) in Iterator :: zip( output. iter_mut( ) , filter_coeffs) {
92
- output_frame. scale_add( input_frame, * coeff) ;
93
- }
94
- filterp += 1 ;
95
- }
96
- for input_frame in & self . buffer[ ..self . buffer_pos] {
97
- let filter_coeffs = & self . filter[ filterp] ;
143
+ for ( filter_coeffs, input_frame) in Iterator :: zip( self . filter. iter( ) , buf) {
98
144
for ( output_frame, coeff) in Iterator :: zip( output. iter_mut( ) , filter_coeffs) {
99
145
output_frame. scale_add( input_frame, * coeff) ;
100
146
}
101
- filterp += 1 ;
102
147
}
103
148
104
149
output
105
150
}
106
151
107
152
pub fn reset( & mut self ) {
108
- self . buffer = Default :: default ( ) ;
153
+ self . buffer = RollingBuffer :: new ( ) ;
109
154
}
110
155
}
111
156
} ;
0 commit comments