executor.rs

use crate::{AppContext, PlatformDispatcher};
use futures::{channel::mpsc, pin_mut, FutureExt};
use smol::prelude::*;
use std::{
    fmt::Debug,
    marker::PhantomData,
    mem,
    num::NonZeroUsize,
    pin::Pin,
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    task::{Context, Poll},
    time::Duration,
};
use util::TryFutureExt;
use waker_fn::waker_fn;

#[cfg(any(test, feature = "test-support"))]
use rand::rngs::StdRng;

/// An executor for spawning futures onto background threads via the platform
/// dispatcher. Cloning it is cheap; all clones share the same dispatcher.
#[derive(Clone)]
pub struct BackgroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
}

/// An executor for spawning futures onto the main thread. It is intentionally
/// `!Send`, so it cannot be moved off the thread on which it was created.
#[derive(Clone)]
pub struct ForegroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
    not_send: PhantomData<Rc<()>>,
}

/// A handle to a spawned future. Dropping a spawned task cancels the
/// underlying future; call [`Task::detach`] to let it run to completion
/// without keeping the handle.
#[must_use]
#[derive(Debug)]
pub enum Task<T> {
    Ready(Option<T>),
    Spawned(async_task::Task<T>),
}

impl<T> Task<T> {
    /// Creates a task that resolves immediately to the given value.
    pub fn ready(val: T) -> Self {
        Task::Ready(Some(val))
    }

    /// Detaches the task so that it keeps running even after this handle is
    /// dropped.
    pub fn detach(self) {
        match self {
            Task::Ready(_) => {}
            Task::Spawned(task) => task.detach(),
        }
    }
}

impl<E, T> Task<Result<T, E>>
where
    T: 'static,
    E: 'static + Debug,
{
    /// Detaches the task, logging the error if the future resolves to `Err`.
    pub fn detach_and_log_err(self, cx: &mut AppContext) {
        cx.foreground_executor().spawn(self.log_err()).detach();
    }
}

impl<T> Future for Task<T> {
    type Output = T;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        match unsafe { self.get_unchecked_mut() } {
            Task::Ready(val) => Poll::Ready(val.take().unwrap()),
            Task::Spawned(task) => task.poll(cx),
        }
    }
}

/// A unique label that can be attached to a spawned task so that tests can
/// identify it and control its scheduling.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct TaskLabel(NonZeroUsize);

impl TaskLabel {
    /// Generates a fresh, process-unique label.
    pub fn new() -> Self {
        static NEXT_TASK_LABEL: AtomicUsize = AtomicUsize::new(1);
        Self(NEXT_TASK_LABEL.fetch_add(1, SeqCst).try_into().unwrap())
    }
}

type AnyLocalFuture<R> = Pin<Box<dyn 'static + Future<Output = R>>>;

type AnyFuture<R> = Pin<Box<dyn 'static + Send + Future<Output = R>>>;

impl BackgroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self { dispatcher }
    }

    /// Enqueues the given future to be run to completion on a background thread.
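    ///
    /// # Example
    ///
    /// A minimal usage sketch; `executor` stands in for a `BackgroundExecutor`
    /// you already hold (for example, one provided by the application context).
    ///
    /// ```ignore
    /// let task = executor.spawn(async { 2 + 2 });
    /// // `block` drives the task to completion on the current thread.
    /// assert_eq!(executor.block(task), 4);
    /// ```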
    pub fn spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
    where
        R: Send + 'static,
    {
        self.spawn_internal::<R>(Box::pin(future), None)
    }

    /// Enqueues the given future to be run to completion on a background thread.
    /// The given label can be used to control the priority of the task in tests.
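    ///
    /// # Example
    ///
    /// A sketch of labeling a task; `executor` and the spawned work are
    /// placeholders.
    ///
    /// ```ignore
    /// let label = TaskLabel::new();
    /// executor.spawn_labeled(label, async { /* background work */ }).detach();
    /// // In tests, `executor.deprioritize(label)` lowers this task's scheduling priority.
    /// ```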
    pub fn spawn_labeled<R>(
        &self,
        label: TaskLabel,
        future: impl Future<Output = R> + Send + 'static,
    ) -> Task<R>
    where
        R: Send + 'static,
    {
        self.spawn_internal::<R>(Box::pin(future), Some(label))
    }

    fn spawn_internal<R: Send + 'static>(
        &self,
        future: AnyFuture<R>,
        label: Option<TaskLabel>,
    ) -> Task<R> {
        let dispatcher = self.dispatcher.clone();
        let (runnable, task) =
            async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable, label));
        runnable.schedule();
        Task::Spawned(task)
    }

    #[cfg(any(test, feature = "test-support"))]
    #[track_caller]
    pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(false, future)
    }

    /// Blocks the current thread until the given future completes.
    pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(true, future)
    }

    #[track_caller]
    pub(crate) fn block_internal<R>(
        &self,
        background_only: bool,
        future: impl Future<Output = R>,
    ) -> R {
        pin_mut!(future);
        let unparker = self.dispatcher.unparker();
        let awoken = Arc::new(AtomicBool::new(false));

        let waker = waker_fn({
            let awoken = awoken.clone();
            move || {
                awoken.store(true, SeqCst);
                unparker.unpark();
            }
        });
        let mut cx = std::task::Context::from_waker(&waker);

        loop {
            match future.as_mut().poll(&mut cx) {
                Poll::Ready(result) => return result,
                Poll::Pending => {
                    if !self.dispatcher.tick(background_only) {
                        if awoken.swap(false, SeqCst) {
                            continue;
                        }

                        #[cfg(any(test, feature = "test-support"))]
                        if let Some(test) = self.dispatcher.as_test() {
                            if !test.parking_allowed() {
                                let mut backtrace_message = String::new();
                                if let Some(backtrace) = test.waiting_backtrace() {
                                    backtrace_message =
                                        format!("\nbacktrace of waiting future:\n{:?}", backtrace);
                                }
                                panic!("parked with nothing left to run{}", backtrace_message)
                            }
                        }

                        self.dispatcher.park();
                    }
                }
            }
        }
    }

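    /// Blocks the current thread on the given future, but gives up once
    /// `duration` has elapsed.
    ///
    /// # Example
    ///
    /// A sketch of the timeout flow; `executor` and `request` are placeholders
    /// for an executor you hold and a pending future.
    ///
    /// ```ignore
    /// match executor.block_with_timeout(Duration::from_millis(100), request) {
    ///     Ok(response) => { /* completed within the timeout */ }
    ///     Err(still_pending) => { /* the original future, which can be awaited later */ }
    /// }
    /// ```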
    pub fn block_with_timeout<R>(
        &self,
        duration: Duration,
        future: impl Future<Output = R>,
    ) -> Result<R, impl Future<Output = R>> {
        let mut future = Box::pin(future.fuse());
        if duration.is_zero() {
            return Err(future);
        }

        let mut timer = self.timer(duration).fuse();
        let timeout = async {
            futures::select_biased! {
                value = future => Ok(value),
                _ = timer => Err(()),
            }
        };
        match self.block(timeout) {
            Ok(value) => Ok(value),
            Err(_) => Err(future),
        }
    }

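    /// Runs a set of futures that may borrow from the caller's stack frame to
    /// completion on background threads, returning once all of them have
    /// finished.
    ///
    /// # Example
    ///
    /// A sketch of parallelizing work over a mutable buffer; `chunks` and
    /// `process` are placeholders for your own data and function.
    ///
    /// ```ignore
    /// let mut results = vec![0; chunks.len()];
    /// executor
    ///     .scoped(|scope| {
    ///         for (chunk, result) in chunks.iter().zip(&mut results) {
    ///             scope.spawn(async move { *result = process(chunk) });
    ///         }
    ///     })
    ///     .await;
    /// ```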
    pub async fn scoped<'scope, F>(&self, scheduler: F)
    where
        F: FnOnce(&mut Scope<'scope>),
    {
        let mut scope = Scope::new(self.clone());
        (scheduler)(&mut scope);
        let spawned = mem::take(&mut scope.futures)
            .into_iter()
            .map(|f| self.spawn(f))
            .collect::<Vec<_>>();
        for task in spawned {
            task.await;
        }
    }

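    /// Returns a task that completes after the given duration has elapsed.
    ///
    /// # Example
    ///
    /// A sketch of awaiting a delay from inside an async context; `executor`
    /// is a placeholder for a `BackgroundExecutor` you hold.
    ///
    /// ```ignore
    /// executor.timer(Duration::from_millis(50)).await;
    /// // ...continue after roughly 50 ms...
    /// ```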
    pub fn timer(&self, duration: Duration) -> Task<()> {
        let (runnable, task) = async_task::spawn(async move {}, {
            let dispatcher = self.dispatcher.clone();
            move |runnable| dispatcher.dispatch_after(duration, runnable)
        });
        runnable.schedule();
        Task::Spawned(task)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn start_waiting(&self) {
        self.dispatcher.as_test().unwrap().start_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn finish_waiting(&self) {
        self.dispatcher.as_test().unwrap().finish_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
        self.dispatcher.as_test().unwrap().simulate_random_delay()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn deprioritize(&self, task_label: TaskLabel) {
        self.dispatcher.as_test().unwrap().deprioritize(task_label)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn advance_clock(&self, duration: Duration) {
        self.dispatcher.as_test().unwrap().advance_clock(duration)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn tick(&self) -> bool {
        self.dispatcher.as_test().unwrap().tick(false)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn run_until_parked(&self) {
        self.dispatcher.as_test().unwrap().run_until_parked()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn allow_parking(&self) {
        self.dispatcher.as_test().unwrap().allow_parking();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn rng(&self) -> StdRng {
        self.dispatcher.as_test().unwrap().rng()
    }

    pub fn num_cpus(&self) -> usize {
        num_cpus::get()
    }

    pub fn is_main_thread(&self) -> bool {
        self.dispatcher.is_main_thread()
    }
}


impl ForegroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self {
            dispatcher,
            not_send: PhantomData,
        }
    }

    /// Enqueues the given future to be run to completion on the main thread.
    /// The future does not need to be `Send`, since it never leaves that thread.
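    ///
    /// # Example
    ///
    /// A sketch of spawning non-`Send` work; `foreground` stands in for a
    /// `ForegroundExecutor`, and the `Rc<Cell<_>>` is only illustrative.
    ///
    /// ```ignore
    /// let counter = Rc::new(Cell::new(0));
    /// foreground
    ///     .spawn({
    ///         let counter = counter.clone();
    ///         async move { counter.set(counter.get() + 1) }
    ///     })
    ///     .detach();
    /// ```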
    pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
    where
        R: 'static,
    {
        let dispatcher = self.dispatcher.clone();
        fn inner<R: 'static>(
            dispatcher: Arc<dyn PlatformDispatcher>,
            future: AnyLocalFuture<R>,
        ) -> Task<R> {
            let (runnable, task) = async_task::spawn_local(future, move |runnable| {
                dispatcher.dispatch_on_main_thread(runnable)
            });
            runnable.schedule();
            Task::Spawned(task)
        }
        inner::<R>(dispatcher, Box::pin(future))
    }
}

/// A collection of futures, possibly borrowing from the enclosing stack frame,
/// spawned via [`BackgroundExecutor::scoped`]. Dropping the `Scope` blocks
/// until all of them have completed.
pub struct Scope<'a> {
    executor: BackgroundExecutor,
    futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
    tx: Option<mpsc::Sender<()>>,
    rx: mpsc::Receiver<()>,
    lifetime: PhantomData<&'a ()>,
}

impl<'a> Scope<'a> {
    fn new(executor: BackgroundExecutor) -> Self {
        let (tx, rx) = mpsc::channel(1);
        Self {
            executor,
            tx: Some(tx),
            rx,
            futures: Default::default(),
            lifetime: PhantomData,
        }
    }

    /// Schedules a future that may borrow from the enclosing scope. It will be
    /// driven to completion before [`BackgroundExecutor::scoped`] returns.
    pub fn spawn<F>(&mut self, f: F)
    where
        F: Future<Output = ()> + Send + 'a,
    {
        let tx = self.tx.clone().unwrap();

        // Safety: The 'a lifetime is guaranteed to outlive any of these futures because
        // dropping this `Scope` blocks until all of the futures have resolved.
        let f = unsafe {
            mem::transmute::<
                Pin<Box<dyn Future<Output = ()> + Send + 'a>>,
                Pin<Box<dyn Future<Output = ()> + Send + 'static>>,
            >(Box::pin(async move {
                f.await;
                drop(tx);
            }))
        };
        self.futures.push(f);
    }
}

impl<'a> Drop for Scope<'a> {
    fn drop(&mut self) {
        self.tx.take().unwrap();

        // Wait until the channel is closed, which means that all of the spawned
        // futures have resolved.
        self.executor.block(self.rx.next());
    }
}