executor.rs

use crate::{AppContext, PlatformDispatcher};
use futures::{channel::mpsc, pin_mut, FutureExt};
use smol::prelude::*;
use std::{
    fmt::Debug,
    marker::PhantomData,
    mem,
    pin::Pin,
    rc::Rc,
    sync::{
        atomic::{AtomicBool, Ordering::SeqCst},
        Arc,
    },
    task::{Context, Poll},
    time::Duration,
};
use util::TryFutureExt;
use waker_fn::waker_fn;

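/// An executor for scheduling futures on background threads via the platform
/// dispatcher. Cloning is cheap; all clones share the same dispatcher.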
#[derive(Clone)]
pub struct BackgroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
}

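/// An executor for scheduling futures on the main thread. The `PhantomData<Rc<()>>`
/// marker makes this type `!Send`, so it can only be used from the thread on which
/// it was created.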
#[derive(Clone)]
pub struct ForegroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
    not_send: PhantomData<Rc<()>>,
}

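/// The result of spawning a future: either a value that is already available
/// (`Ready`) or a handle to a future running on an executor (`Spawned`). A spawned
/// task is cancelled if this handle is dropped without calling [`Task::detach`].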
#[must_use]
#[derive(Debug)]
pub enum Task<T> {
    Ready(Option<T>),
    Spawned(async_task::Task<T>),
}

impl<T> Task<T> {
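    /// Creates a task that resolves immediately to `val` without scheduling any work.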
    pub fn ready(val: T) -> Self {
        Task::Ready(Some(val))
    }

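    /// Detaches the task so the underlying future keeps running after this handle
    /// is dropped. Its output is discarded.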
    pub fn detach(self) {
        match self {
            Task::Ready(_) => {}
            Task::Spawned(task) => task.detach(),
        }
    }
}

impl<E, T> Task<Result<T, E>>
where
    T: 'static,
    E: 'static + Debug,
{
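    /// Detaches the task, spawning a wrapper on the foreground executor that logs
    /// the error if the task resolves to `Err`.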
    pub fn detach_and_log_err(self, cx: &mut AppContext) {
        cx.foreground_executor().spawn(self.log_err()).detach();
    }
}

impl<T> Future for Task<T> {
    type Output = T;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        match unsafe { self.get_unchecked_mut() } {
            Task::Ready(val) => Poll::Ready(val.take().unwrap()),
            Task::Spawned(task) => task.poll(cx),
        }
    }
}

impl BackgroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self { dispatcher }
    }

    /// Enqueues the given future to be run to completion on any available
    /// background thread.
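    ///
    /// A minimal usage sketch (assuming you already hold a `BackgroundExecutor`
    /// named `executor`):
    ///
    /// ```ignore
    /// let task = executor.spawn(async { 2 + 2 });
    /// assert_eq!(executor.block(task), 4);
    /// ```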
    pub fn spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
    where
        R: Send + 'static,
    {
        let dispatcher = self.dispatcher.clone();
        let (runnable, task) =
            async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable));
        runnable.schedule();
        Task::Spawned(task)
    }

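    /// Blocks the current thread until the given future completes. Unlike
    /// [`Self::block`], this passes `background_only = false` to the dispatcher's
    /// polling loop; it is only available in tests or with the `test-support` feature.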
    #[cfg(any(test, feature = "test-support"))]
    pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(false, future)
    }

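    /// Blocks the current thread until the given future completes, driving the
    /// dispatcher's background work while waiting.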
    pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(true, future)
    }

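    /// Shared implementation of [`Self::block`] and [`Self::block_test`]: polls the
    /// future in a loop, asking the dispatcher to make progress whenever the future
    /// is pending. If the dispatcher has nothing left to run and the waker has not
    /// fired, the thread is parked (panicking in tests when parking is disallowed).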
    pub(crate) fn block_internal<R>(
        &self,
        background_only: bool,
        future: impl Future<Output = R>,
    ) -> R {
        pin_mut!(future);
        let unparker = self.dispatcher.unparker();
        let awoken = Arc::new(AtomicBool::new(false));

        let waker = waker_fn({
            let awoken = awoken.clone();
            move || {
                awoken.store(true, SeqCst);
                unparker.unpark();
            }
        });
        let mut cx = std::task::Context::from_waker(&waker);

        loop {
            match future.as_mut().poll(&mut cx) {
                Poll::Ready(result) => return result,
                Poll::Pending => {
                    if !self.dispatcher.poll(background_only) {
                        if awoken.swap(false, SeqCst) {
                            continue;
                        }

                        #[cfg(any(test, feature = "test-support"))]
                        if let Some(test) = self.dispatcher.as_test() {
                            if !test.parking_allowed() {
                                let mut backtrace_message = String::new();
                                if let Some(backtrace) = test.waiting_backtrace() {
                                    backtrace_message =
                                        format!("\nbacktrace of waiting future:\n{:?}", backtrace);
                                }
                                panic!("parked with nothing left to run\n{:?}", backtrace_message)
                            }
                        }

                        self.dispatcher.park();
                    }
                }
            }
        }
    }

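    /// Blocks on the given future for at most `duration`. Returns `Ok(value)` if the
    /// future completes in time; otherwise returns the still-pending (boxed, fused)
    /// future as `Err` so the caller can continue awaiting it.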
    pub fn block_with_timeout<R>(
        &self,
        duration: Duration,
        future: impl Future<Output = R>,
    ) -> Result<R, impl Future<Output = R>> {
        let mut future = Box::pin(future.fuse());
        if duration.is_zero() {
            return Err(future);
        }

        let mut timer = self.timer(duration).fuse();
        let timeout = async {
            futures::select_biased! {
                value = future => Ok(value),
                _ = timer => Err(()),
            }
        };
        match self.block(timeout) {
            Ok(value) => Ok(value),
            Err(_) => Err(future),
        }
    }

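    /// Runs a set of futures that may borrow from the caller's stack frame. The
    /// `scheduler` closure spawns futures onto the provided [`Scope`]; all of them
    /// are then spawned on this executor and awaited before this method returns.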
    pub async fn scoped<'scope, F>(&self, scheduler: F)
    where
        F: FnOnce(&mut Scope<'scope>),
    {
        let mut scope = Scope::new(self.clone());
        (scheduler)(&mut scope);
        let spawned = mem::take(&mut scope.futures)
            .into_iter()
            .map(|f| self.spawn(f))
            .collect::<Vec<_>>();
        for task in spawned {
            task.await;
        }
    }

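    /// Returns a task that completes after the given duration, implemented by asking
    /// the dispatcher to schedule an empty future after the delay.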
    pub fn timer(&self, duration: Duration) -> Task<()> {
        let (runnable, task) = async_task::spawn(async move {}, {
            let dispatcher = self.dispatcher.clone();
            move |runnable| dispatcher.dispatch_after(duration, runnable)
        });
        runnable.schedule();
        Task::Spawned(task)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn start_waiting(&self) {
        self.dispatcher.as_test().unwrap().start_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn finish_waiting(&self) {
        self.dispatcher.as_test().unwrap().finish_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
        self.dispatcher.as_test().unwrap().simulate_random_delay()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn advance_clock(&self, duration: Duration) {
        self.dispatcher.as_test().unwrap().advance_clock(duration)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn run_until_parked(&self) {
        self.dispatcher.as_test().unwrap().run_until_parked()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn allow_parking(&self) {
        self.dispatcher.as_test().unwrap().allow_parking();
    }

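    /// Returns the number of logical CPUs available to the process.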
    pub fn num_cpus(&self) -> usize {
        num_cpus::get()
    }

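    /// Reports whether the calling thread is the main thread, as determined by the
    /// platform dispatcher.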
    pub fn is_main_thread(&self) -> bool {
        self.dispatcher.is_main_thread()
    }
}

impl ForegroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self {
            dispatcher,
            not_send: PhantomData,
        }
    }

    /// Enqueues the given future to be run to completion on the main thread.
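    ///
    /// A minimal usage sketch (assuming you already hold a `ForegroundExecutor`
    /// named `executor` on the main thread):
    ///
    /// ```ignore
    /// executor
    ///     .spawn(async {
    ///         // Runs on the main thread; the future need not be `Send`.
    ///     })
    ///     .detach();
    /// ```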
    pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
    where
        R: 'static,
    {
        let dispatcher = self.dispatcher.clone();
        let (runnable, task) = async_task::spawn_local(future, move |runnable| {
            dispatcher.dispatch_on_main_thread(runnable)
        });
        runnable.schedule();
        Task::Spawned(task)
    }
}

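/// Collects borrowed futures spawned via [`BackgroundExecutor::scoped`]. Dropping
/// the scope blocks until every spawned future has completed, which is what makes
/// the lifetime-erasing transmute in [`Scope::spawn`] sound.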
pub struct Scope<'a> {
    executor: BackgroundExecutor,
    futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
    tx: Option<mpsc::Sender<()>>,
    rx: mpsc::Receiver<()>,
    lifetime: PhantomData<&'a ()>,
}

impl<'a> Scope<'a> {
    fn new(executor: BackgroundExecutor) -> Self {
        let (tx, rx) = mpsc::channel(1);
        Self {
            executor,
            tx: Some(tx),
            rx,
            futures: Default::default(),
            lifetime: PhantomData,
        }
    }

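    /// Adds a future to the scope. The future may borrow data that lives for `'a`;
    /// it is boxed together with a clone of the scope's completion sender and its
    /// lifetime is erased to `'static` so it can later be spawned by
    /// [`BackgroundExecutor::scoped`].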
    pub fn spawn<F>(&mut self, f: F)
    where
        F: Future<Output = ()> + Send + 'a,
    {
        let tx = self.tx.clone().unwrap();

        // Safety: The 'a lifetime is guaranteed to outlive any of these futures because
        // dropping this `Scope` blocks until all of the futures have resolved.
        let f = unsafe {
            mem::transmute::<
                Pin<Box<dyn Future<Output = ()> + Send + 'a>>,
                Pin<Box<dyn Future<Output = ()> + Send + 'static>>,
            >(Box::pin(async move {
                f.await;
                drop(tx);
            }))
        };
        self.futures.push(f);
    }
}

impl<'a> Drop for Scope<'a> {
    fn drop(&mut self) {
        self.tx.take().unwrap();

        // Wait until the channel is closed, which means that all of the spawned
        // futures have resolved.
        self.executor.block(self.rx.next());
    }
}