@@ -1136,6 +1136,22 @@ impl atomic_buf {
11361136 . expect ( "An atomic_buf is always aligned" )
11371137 }
11381138
1139+ /// Index into this buffer at a generalized, potentially skewed, typed index.
1140+ ///
1141+ /// # Panics
1142+ ///
1143+ /// This method panics if the index is out-of-range.
1144+ pub fn index < T > ( & self , index : TexelRange < T > ) -> AtomicSliceRef < ' _ , T > {
1145+ let scale = index. texel . align ( ) ;
1146+
1147+ AtomicSliceRef {
1148+ buf : self ,
1149+ start : scale * index. start_per_align ,
1150+ end : scale * index. end_per_align ,
1151+ texel : index. texel ,
1152+ }
1153+ }
1154+
11391155 /// Apply a mapping function to some elements.
11401156 ///
11411157 /// The indices `src` and `dest` are indices as if the slice were interpreted as `[P]` or `[Q]`
@@ -1312,8 +1328,8 @@ impl<'lt, P> AtomicSliceRef<'lt, P> {
13121328 } else {
13131329 Some ( AtomicSliceRef {
13141330 buf : self . buf ,
1315- start : start * self . texel . size ( ) ,
1316- end : end * self . texel . size ( ) ,
1331+ start : self . start + start * self . texel . size ( ) ,
1332+ end : self . start + end * self . texel . size ( ) ,
13171333 texel : self . texel ,
13181334 } )
13191335 }
@@ -1372,6 +1388,12 @@ impl<'lt, P> AtomicSliceRef<'lt, P> {
13721388 }
13731389 }
13741390
1391+ pub ( crate ) fn as_ptr_range ( self ) -> core:: ops:: Range < * mut P > {
1392+ let base = self . buf . 0 . as_ptr_range ( ) ;
1393+ ( ( base. start as * mut u8 ) . wrapping_add ( self . start ) as * mut P )
1394+ ..( ( base. start as * mut u8 ) . wrapping_add ( self . end ) as * mut P )
1395+ }
1396+
13751397 /// Equivalent of [`core::slice::from_ref`] but we have no mutable analogue.
13761398 pub ( crate ) fn from_ref ( value : AtomicRef < ' lt , P > ) -> Self {
13771399 AtomicSliceRef {
@@ -1443,11 +1465,36 @@ impl<T> TexelRange<T> {
14431465 } )
14441466 }
14451467
1468+ /// Construct from a range of bytes.
1469+ ///
1470+ /// The range must be aligned to the type `T` and the length of the range must be a multiple of
1471+ /// the size. However, in contrast to [`Self::new`] it may be skewed with regards to the size
1472+ /// of the type. For instance, a slice `[u8; 3]` may begin one byte into the underlying buffer.
1473+ ///
1474+ /// Note that a range with its end before the start is interpreted as an empty range and only
1475+ /// has to fulfill the alignment requirement for its start byte.
1476+ ///
1477+ /// # Examples
1478+ ///
1479+ /// ```
1480+ /// use image_texel::texels::{U16, TexelRange};
1481+ ///
1482+ /// assert!(TexelRange::from_byte_range(U16, 0..4).is_some());
1483+ /// // Misaligned.
1484+ /// assert!(TexelRange::from_byte_range(U16, 1..5).is_none());
1485+ /// // Okay.
1486+ /// assert!(TexelRange::from_byte_range(U16.array::<4>(), 2..10).is_some());
1487+ /// // Okay but empty.
1488+ /// assert!(TexelRange::from_byte_range(U16.array::<4>(), 2..0).is_some());
1489+ /// ```
14461490 pub fn from_byte_range ( texel : Texel < T > , range : ops:: Range < usize > ) -> Option < Self > {
14471491 let start_byte = range. start ;
1448- let end_byte = range. end ;
1492+ let end_byte = range. end . max ( start_byte ) ;
14491493
1450- if start_byte % texel. align ( ) != 0 || end_byte % texel. align ( ) != 0 {
1494+ if start_byte % texel. align ( ) != 0
1495+ || end_byte % texel. align ( ) != 0
1496+ || ( end_byte - start_byte) % texel. size ( ) != 0
1497+ {
14511498 return None ;
14521499 }
14531500
@@ -1665,7 +1712,6 @@ mod tests {
16651712 let buffer = AtomicBuffer :: with_buffer ( initial_state) ;
16661713 // And receive all the results in this shared copy of our buffer.
16671714 let output_tap = buffer. clone ( ) ;
1668- // assert!(buffer.ptr_eq(&output_tap));
16691715
16701716 // Map those numbers in-place.
16711717 buffer. map_within ( ..LEN , 0 , |n : u32 | n as u8 , U32 , U8 ) ;
@@ -1900,4 +1946,142 @@ mod tests {
19001946 U8 . load_atomic_slice ( lhs. as_texels ( U8 ) , & mut buffer) ;
19011947 assert ! ( * lhs == buffer[ ..] , "Must be equal with its data" ) ;
19021948 }
1949+
#[test]
fn atomic_with_u8() {
    // Round-trip bytes through the atomic buffer at every possible byte skew.
    for offset in 0..MAX_ALIGN {
        let backing = [const { MaxAtomic::zero() }; 4];
        let atomic = atomic_buf::new(&backing[..]);

        // 0, 1, 2, … — a recognizable payload pattern.
        let data: [u8; 3 * MAX_ALIGN] = core::array::from_fn(|i| i as u8);

        let target = atomic.as_texels(U8).index(offset..).index(..3 * MAX_ALIGN);
        U8.store_atomic_slice(target, &data[..]);

        // Read back both through a plain slice and through cells.
        let mut readback = [0u8; 3 * MAX_ALIGN];
        U8.load_atomic_slice(target, &mut readback[..]);

        let cells = [const { core::cell::Cell::new(0u8) }; 3 * MAX_ALIGN];
        U8.load_atomic_to_cells(target, &cells[..]);

        assert_eq!(data, readback);
        assert_eq!(data, cells.map(|x| x.into_inner()));

        // The whole buffer must hold the pattern at exactly `offset` bytes in.
        let mut whole = [0u8; 4 * MAX_ALIGN];
        U8.load_atomic_slice(atomic.as_texels(U8), &mut whole[..]);

        assert_eq!(data, whole[offset..][..3 * MAX_ALIGN], "offset {offset}");
    }
}
1982+
#[test]
fn atomic_with_u16() {
    use crate::texels::U16;

    // Same round-trip as the u8 test, but skewed by whole u16 elements.
    for offset in 0..MAX_ALIGN / 2 {
        let backing = [const { MaxAtomic::zero() }; 4];
        let atomic = atomic_buf::new(&backing[..]);

        // 0, 1, 2, … — a recognizable payload pattern.
        let data: [u16; 3 * MAX_ALIGN / 2] = core::array::from_fn(|i| i as u16);

        let target = atomic
            .as_texels(U16)
            .index(offset..)
            .index(..3 * MAX_ALIGN / 2);
        U16.store_atomic_slice(target, &data[..]);

        // Read back both through a plain slice and through cells.
        let mut readback = [0u16; 3 * MAX_ALIGN / 2];
        U16.load_atomic_slice(target, &mut readback[..]);

        let cells = [const { core::cell::Cell::new(0u16) }; 3 * MAX_ALIGN / 2];
        U16.load_atomic_to_cells(target, &cells[..]);

        assert_eq!(data, readback);
        assert_eq!(data, cells.map(|x| x.into_inner()));
    }
}
2015+
#[test]
fn atomic_from_cells() {
    // Storing from a cell slice into a skewed atomic slice must land the
    // bytes at exactly the requested offset, for every small skew.
    for offset in 0..4 {
        let data = [const { MaxAtomic::zero() }; 1];
        let lhs = atomic_buf::new(&data[0..1]);

        let data = [const { MaxCell::zero() }; 1];
        let rhs = cell_buf::new(&data[0..1]);

        // Create a source whose bytes are distinguishable: an 0x84 tail at
        // 4..8, the marker [1, 2] at 2..4, and zeros at 0..2.
        let source = rhs.as_texels(U8).as_slice_of_cells();
        U8.store_cell_slice(&source[4..8], &[0x84; 4]);
        U8.store_cell_slice(&source[2..4], &[1, 2]);
        let source = &source[..8 - offset];
        // Copy the (truncated) source into the atomic at byte `offset`.
        U8.store_atomic_from_cells(lhs.as_texels(U8).index(offset..8), source);

        // 0x42 sentinel: distinguishable from both zero and the stored data.
        let mut buffer = [0x42; mem::size_of::<MaxCell>()];
        U8.load_atomic_slice(lhs.as_texels(U8), &mut buffer);

        // Bytes before the offset were never part of the store target.
        assert!(
            buffer[..offset].iter().all(|&x| x == 0),
            "Must still be unset",
        );

        // The head of the source ([0, 0, 1, 2]) appears at the offset.
        assert!(
            buffer[offset..][..4] == [0, 0, 1, 2],
            "Must contain the data",
        );

        // The 0x84 tail fills the remainder up to byte 8.
        assert!(
            buffer[offset..8][4..].iter().all(|&x| x == 0x84),
            "Must be initialized by tail {:?}",
            &buffer[offset..][4..],
        );
    }
}
2053+
#[test]
fn atomic_to_cells() {
    // Loading from a skewed atomic slice into a cell slice must read the
    // right bytes for every small skew.
    for offset in 0..4 {
        let data = [const { MaxAtomic::zero() }; 1];
        let lhs = atomic_buf::new(&data[0..1]);

        let data = [const { MaxCell::zero() }; 1];
        let rhs = cell_buf::new(&data[0..1]);

        // Seed the atomic with distinguishable bytes: an 0x84 tail at 4..8,
        // then [0, 0, 1, 2] at offset..offset+4 (partially overwriting the
        // tail for larger offsets).
        U8.store_atomic_slice(lhs.as_texels(U8).index(4..8), &[0x84; 4]);
        U8.store_atomic_slice(lhs.as_texels(U8).index(offset..).index(..4), &[0, 0, 1, 2]);

        // The cell buffer receives the loaded bytes.
        let target = rhs.as_texels(U8).as_slice_of_cells();
        // Load atomic bytes offset..8 into the first 8 - offset cells.
        U8.load_atomic_to_cells(lhs.as_texels(U8).index(offset..8), &target[..8 - offset]);

        // 0x42 sentinel: distinguishable from both zero and the loaded data.
        let mut buffer = [0x42; mem::size_of::<MaxCell>()];
        U8.load_cell_slice(target, &mut buffer);

        // The marker bytes land at the start of the cells.
        assert!(
            buffer[..4] == [0, 0, 1, 2],
            "Must contain the data {:?}",
            &buffer[..4],
        );

        // The remainder of the loaded span is the 0x84 tail.
        assert!(
            buffer[..8 - offset][4..].iter().all(|&x| x == 0x84),
            "Must be initialized by tail {:?}",
            &buffer[..8 - offset][4..],
        );
    }
}
19032087}
0 commit comments