hal_x86_64/cpu/entropy.rs

use super::{intrinsics, FeatureNotSupported, Port};
use core::sync::atomic::{AtomicU16, Ordering};
use mycelium_util::sync::spin;
use raw_cpuid::CpuId;

#[cfg(feature = "rand_core")]
use crate::time::Rdtsc;
#[cfg(feature = "rand_core")]
use mycelium_util::sync::Lazy;
#[cfg(feature = "rand_core")]
use rand_core::{RngCore, SeedableRng};

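/// Seeds a [`SeedableRng`] by mixing together the hardware entropy sources
/// available on this CPU (`RDRAND`, the timestamp counter, and the PIT).
///
/// A minimal usage sketch, assuming the `rand_xoshiro` crate (or any other
/// [`SeedableRng`] implementation) is available to the caller:
///
/// ```ignore
/// use rand_core::RngCore;
/// use rand_xoshiro::Xoroshiro128PlusPlus;
///
/// // Seed a deterministic PRNG from the hardware entropy sources.
/// let mut rng: Xoroshiro128PlusPlus = seed_rng();
/// let value = rng.next_u64();
/// ```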
#[cfg(feature = "rand_core")]
pub fn seed_rng<R: SeedableRng>() -> R {
    let mut seed = R::Seed::default();
    static RDRAND: Lazy<Option<Rdrand>> = Lazy::new(|| Rdrand::new().ok());
    static RDTSC: Lazy<Option<Rdtsc>> = Lazy::new(|| Rdtsc::new().ok());

    for byte in seed.as_mut() {
        // Read one byte of PIT counter noise; it is mixed into the seed byte
        // and also selects which byte of the other sources to sample.
        let pit = {
            let mut dst = [0u8];
            PitEntropy::read_bytes(&mut dst[..]);
            dst[0]
        };

        // Mix in a byte of RDRAND output, if the CPU supports it.
        if let Some(rdrand) = RDRAND.as_ref() {
            *byte ^= rdrand.next_u32().to_ne_bytes()[pit as usize % 4];
        }

        // Mix in a byte of the timestamp counter, if available.
        if let Some(rdtsc) = RDTSC.as_ref() {
            *byte ^= rdtsc.read_timestamp().to_ne_bytes()[pit as usize % 8];
        }

        *byte ^= pit;
    }

    R::from_seed(seed)
}

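/// An entropy source backed by the `RDRAND` instruction, which samples the
/// CPU's on-chip hardware random number generator.
///
/// A minimal usage sketch (this assumes the CPU supports `RDRAND`; [`Rdrand::new`]
/// returns an error otherwise):
///
/// ```ignore
/// if let Ok(rdrand) = Rdrand::new() {
///     // Read a 64-bit random value, retrying if the instruction is busy.
///     let value: u64 = rdrand.next_u64();
/// }
/// ```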
#[derive(Debug, Copy, Clone)]
pub struct Rdrand(());

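/// An entropy source backed by reads of the 8253/8254 Programmable Interval
/// Timer's counter ports.
///
/// A minimal usage sketch; the PIT counter ports are read directly, so this
/// is only meaningful when running with I/O port access (e.g. in ring 0):
///
/// ```ignore
/// // Sample 32 bits of timer noise from the PIT channels.
/// let noise = PitEntropy::next_u32();
/// ```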
#[derive(Debug, Copy, Clone, Default)]
pub struct PitEntropy(());

impl Rdrand {
    /// Maximum number of times to retry `RDRAND` before giving up.
    pub const MAX_RETRIES: usize = 64;

    /// Returns `true` if this CPU supports the `RDRAND` instruction.
    pub fn is_supported() -> bool {
        CpuId::new()
            .get_feature_info()
            .map(|features| features.has_rdrand())
            .unwrap_or(false)
    }

    pub fn new() -> Result<Self, FeatureNotSupported> {
        if Self::is_supported() {
            Ok(Self(()))
        } else {
            Err(FeatureNotSupported::new("rdrand"))
        }
    }

    /// Attempts to read a random `u32`, returning `None` if `RDRAND` did not
    /// have a value available.
    pub fn try_next_u32(&self) -> Option<u32> {
        let mut res: u32 = 0;
        unsafe {
            match intrinsics::rdrand32_step(&mut res) {
                1 => Some(res),
                0 => None,
                x => unreachable!("rdrand32_step should only return 1 or 0, but got {}", x),
            }
        }
    }

    pub fn next_u32(&self) -> u32 {
        self.with_retry(Self::try_next_u32)
    }

    /// Attempts to read a random `u64`, returning `None` if `RDRAND` did not
    /// have a value available.
    pub fn try_next_u64(&self) -> Option<u64> {
        let mut res: u64 = 0;
        unsafe {
            match intrinsics::rdrand64_step(&mut res) {
                1 => Some(res),
                0 => None,
                x => unreachable!("rdrand64_step should only return 1 or 0, but got {}", x),
            }
        }
    }

    pub fn next_u64(&self) -> u64 {
        self.with_retry(Self::try_next_u64)
    }

    /// Retries `f` up to [`Self::MAX_RETRIES`] times, spinning between
    /// attempts, and falls back to `T::default()` if no value could be read.
    fn with_retry<T: Default>(&self, f: impl Fn(&Self) -> Option<T>) -> T {
        let mut backoff = spin::Backoff::new();

        for _ in 0..Self::MAX_RETRIES {
            if let Some(res) = f(self) {
                return res;
            }
            backoff.spin();
        }
        T::default()
    }
}

#[cfg(feature = "rand_core")]
impl RngCore for Rdrand {
    fn next_u32(&mut self) -> u32 {
        Rdrand::next_u32(&*self)
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        Rdrand::next_u64(&*self)
    }

    fn fill_bytes(&mut self, dest: &mut [u8]) {
        rand_core::impls::fill_bytes_via_next(self, dest)
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
        use core::num::NonZeroU32;
        let mut left = dest;
        // Fill as many whole 8-byte chunks as possible from 64-bit reads.
        while left.len() >= 8 {
            let (l, r) = { left }.split_at_mut(8);
            left = r;

            let chunk: [u8; 8] = self
                .try_next_u64()
                .ok_or_else(|| rand_core::Error::from(NonZeroU32::new(1).expect("1 is non-zero")))?
                .to_ne_bytes();
            l.copy_from_slice(&chunk);
        }
        // Fill the remaining tail (fewer than 8 bytes) from a single 64-bit
        // or 32-bit read, whichever is large enough.
        let n = left.len();
        if n > 4 {
            let chunk: [u8; 8] = self
                .try_next_u64()
                .ok_or_else(|| rand_core::Error::from(NonZeroU32::new(1).expect("1 is non-zero")))?
                .to_ne_bytes();
            left.copy_from_slice(&chunk[..n]);
        } else if n > 0 {
            let chunk: [u8; 4] = self
                .try_next_u32()
                .ok_or_else(|| rand_core::Error::from(NonZeroU32::new(1).expect("1 is non-zero")))?
                .to_ne_bytes();
            left.copy_from_slice(&chunk[..n]);
        }

        Ok(())
    }
}

#[cfg(feature = "rand_core")]
impl RngCore for Rdtsc {
    fn next_u32(&mut self) -> u32 {
        self.read_timestamp() as u32
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        self.read_timestamp()
    }

    fn fill_bytes(&mut self, dest: &mut [u8]) {
        rand_core::impls::fill_bytes_via_next(self, dest)
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
        self.fill_bytes(dest);
        Ok(())
    }
}

impl PitEntropy {
    pub const fn new() -> Self {
        Self(())
    }

    pub fn next_u32() -> u32 {
        let mut bytes = [0u8; 4];
        Self::read_bytes(&mut bytes[..]);
        u32::from_be_bytes(bytes)
    }

    pub fn next_u64() -> u64 {
        let mut bytes = [0u8; 8];
        Self::read_bytes(&mut bytes[..]);
        u64::from_be_bytes(bytes)
    }

    fn read_bytes(dest: &mut [u8]) {
        const PIT_BASE: u16 = 0x40;

        // Rotate through the PIT's three channel ports (0x40-0x42) so that
        // successive reads don't all sample the same counter.
        static NEXT_CHANNEL: AtomicU16 = AtomicU16::new(0);

        for byte in dest {
            let channel = NEXT_CHANNEL.fetch_add(1, Ordering::Relaxed) % 3;
            *byte = unsafe { Port::at(PIT_BASE + channel).readb() };
        }
    }
}

#[cfg(feature = "rand_core")]
impl RngCore for PitEntropy {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        Self::next_u32()
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        Self::next_u64()
    }

    fn fill_bytes(&mut self, dest: &mut [u8]) {
        Self::read_bytes(dest);
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
        Self::read_bytes(dest);
        Ok(())
    }
}