@@ -54,7 +54,7 @@ pub struct ReentrantMutex {
 
 impl ReentrantMutex {
     pub unsafe fn uninitialized() -> ReentrantMutex {
-        ReentrantMutex {
+        ReentrantMutex {
             lock: UnsafeCell::new(MaybeUninit::uninit()),
             recursion: UnsafeCell::new(MaybeUninit::uninit())
         }
@@ -67,9 +67,9 @@ impl ReentrantMutex {
 
     pub unsafe fn try_lock(&self) -> bool {
         // Attempt to acquire the lock.
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
-        if let Err(old) = (*(*lock).as_mut_ptr()).compare_exchange(
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
+        if let Err(old) = (*lock).compare_exchange(
             abi::LOCK_UNLOCKED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
             Ordering::Acquire,
@@ -78,14 +78,14 @@ impl ReentrantMutex {
             // If we fail to acquire the lock, it may be the case
             // that we've already acquired it and may need to recurse.
             if old & !abi::LOCK_KERNEL_MANAGED.0 == __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0 {
-                *(*recursion).as_mut_ptr() += 1;
+                *recursion += 1;
                 true
             } else {
                 false
             }
         } else {
             // Success.
-            assert_eq!(*(*recursion).as_mut_ptr(), 0, "Mutex has invalid recursion count");
+            assert_eq!(*recursion, 0, "Mutex has invalid recursion count");
             true
         }
     }
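For context, the change in `try_lock` (and in `unlock`/`destroy` below) hoists the `MaybeUninit::as_mut_ptr()` call into the initial `let` bindings, so `lock` and `recursion` are raw pointers to the inner values instead of pointers to the `MaybeUninit` wrappers that had to be unwrapped at every use. A minimal sketch of the same pattern, assuming the field types `UnsafeCell<MaybeUninit<AtomicU32>>` and `UnsafeCell<MaybeUninit<u32>>` from the surrounding code, with placeholder constants standing in for the cloudabi `abi` values and `__pthread_thread_id`:

```rust
use std::cell::UnsafeCell;
use std::mem::MaybeUninit;
use std::sync::atomic::{AtomicU32, Ordering};

// Assumed layout, mirroring the struct in the diff.
struct ReentrantMutex {
    lock: UnsafeCell<MaybeUninit<AtomicU32>>,
    recursion: UnsafeCell<MaybeUninit<u32>>,
}

// Placeholders for abi::LOCK_UNLOCKED.0 and
// __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0 in the real code.
const UNLOCKED: u32 = 0;
const LOCKED_BY_US: u32 = 1;

impl ReentrantMutex {
    unsafe fn try_lock_sketch(&self) -> bool {
        // Unwrap the MaybeUninit wrappers once; both bindings are now
        // plain raw pointers (*mut AtomicU32 and *mut u32).
        let lock = (*self.lock.get()).as_mut_ptr();
        let recursion = (*self.recursion.get()).as_mut_ptr();

        match (*lock).compare_exchange(UNLOCKED, LOCKED_BY_US, Ordering::Acquire, Ordering::Relaxed) {
            Ok(_) => {
                // First acquisition: the recursion counter must be zero.
                assert_eq!(*recursion, 0, "Mutex has invalid recursion count");
                true
            }
            // The real code also masks out abi::LOCK_KERNEL_MANAGED here.
            Err(old) if old == LOCKED_BY_US => {
                // Already held by this thread: recurse.
                *recursion += 1;
                true
            }
            Err(_) => false,
        }
    }
}
```

The effect is purely ergonomic: every later access goes through a single dereference rather than `(*(*ptr).as_mut_ptr())`.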
@@ -113,17 +113,17 @@ impl ReentrantMutex {
     }
 
     pub unsafe fn unlock(&self) {
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
         assert_eq!(
-            (*(*lock).as_mut_ptr()).load(Ordering::Relaxed) & !abi::LOCK_KERNEL_MANAGED.0,
+            (*lock).load(Ordering::Relaxed) & !abi::LOCK_KERNEL_MANAGED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
             "This mutex is locked by a different thread"
         );
 
-        if *(*recursion).as_mut_ptr() > 0 {
-            *(*recursion).as_mut_ptr() -= 1;
-        } else if !(*(*lock).as_mut_ptr())
+        if *recursion > 0 {
+            *recursion -= 1;
+        } else if !(*lock)
             .compare_exchange(
                 __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
                 abi::LOCK_UNLOCKED.0,
@@ -140,13 +140,13 @@ impl ReentrantMutex {
     }
 
     pub unsafe fn destroy(&self) {
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
         assert_eq!(
-            (*(*lock).as_mut_ptr()).load(Ordering::Relaxed),
+            (*lock).load(Ordering::Relaxed),
             abi::LOCK_UNLOCKED.0,
             "Attempted to destroy locked mutex"
         );
-        assert_eq!(*(*recursion).as_mut_ptr(), 0, "Recursion counter invalid");
+        assert_eq!(*recursion, 0, "Recursion counter invalid");
     }
 }
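For reference, a sketch of the type-level effect of the new bindings, with the field types assumed as above and the helper name `binding_types` made up for illustration: the old bindings were raw pointers to the `MaybeUninit` wrappers, while the new ones point directly at the inner values, which is what removes the repeated `(*...).as_mut_ptr()` at each use site in `unlock` and `destroy` as well.

```rust
use std::cell::UnsafeCell;
use std::mem::MaybeUninit;
use std::sync::atomic::AtomicU32;

struct ReentrantMutex {
    lock: UnsafeCell<MaybeUninit<AtomicU32>>,
    recursion: UnsafeCell<MaybeUninit<u32>>,
}

unsafe fn binding_types(m: &ReentrantMutex) {
    // Before: pointers to the MaybeUninit wrappers.
    let lock_old: *mut MaybeUninit<AtomicU32> = m.lock.get();
    let recursion_old: *mut MaybeUninit<u32> = m.recursion.get();

    // After: pointers to the (assumed-initialized) inner values.
    let lock_new: *mut AtomicU32 = (*m.lock.get()).as_mut_ptr();
    let recursion_new: *mut u32 = (*m.recursion.get()).as_mut_ptr();

    // Silence unused-variable warnings in this sketch.
    let _ = (lock_old, recursion_old, lock_new, recursion_new);
}
```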