executor.rs

use crate::{AppContext, PlatformDispatcher};
use futures::{channel::mpsc, pin_mut, FutureExt};
use smol::prelude::*;
use std::{
    fmt::Debug,
    marker::PhantomData,
    mem,
    pin::Pin,
    rc::Rc,
    sync::{
        atomic::{AtomicBool, Ordering::SeqCst},
        Arc,
    },
    task::{Context, Poll},
    time::Duration,
};
use util::TryFutureExt;
use waker_fn::waker_fn;

/// Runs futures to completion on background threads via the platform dispatcher.
#[derive(Clone)]
pub struct BackgroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
}

/// Runs futures to completion on the main thread via the platform dispatcher.
/// The `PhantomData<Rc<()>>` marker keeps this type `!Send`.
#[derive(Clone)]
pub struct ForegroundExecutor {
    dispatcher: Arc<dyn PlatformDispatcher>,
    not_send: PhantomData<Rc<()>>,
}

/// A handle to a spawned (or already-completed) unit of work. Dropping a
/// spawned `Task` cancels it; call `detach` to let it run to completion.
#[must_use]
pub enum Task<T> {
    Ready(Option<T>),
    Spawned(async_task::Task<T>),
}

impl<T> Task<T> {
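    /// Creates a task that is already complete with the given value. A hedged
    /// sketch of a typical use, for an API that sometimes has its answer cached
    /// (the surrounding function and names are hypothetical; not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// fn word_count(cached: Option<usize>, executor: &BackgroundExecutor, text: String) -> Task<usize> {
    ///     match cached {
    ///         // Nothing to await; hand back an already-resolved task.
    ///         Some(count) => Task::ready(count),
    ///         None => executor.spawn(async move { text.split_whitespace().count() }),
    ///     }
    /// }
    /// ```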
    pub fn ready(val: T) -> Self {
        Task::Ready(Some(val))
    }

    pub fn detach(self) {
        match self {
            Task::Ready(_) => {}
            Task::Spawned(task) => task.detach(),
        }
    }
}

impl<E, T> Task<Result<T, E>>
where
    T: 'static + Send,
    E: 'static + Send + Debug,
{
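    /// Spawns a wrapper for this task on the background executor that logs the
    /// error if the task resolves to `Err`, then detaches it. A hedged sketch
    /// (assumes a `cx: &mut AppContext` is in scope; not compiled as a doctest):
    ///
    /// ```ignore
    /// let task: Task<Result<(), String>> = cx
    ///     .background_executor()
    ///     .spawn(async { Err("disk full".to_string()) });
    /// // Fire and forget: the error is logged rather than silently dropped.
    /// task.detach_and_log_err(cx);
    /// ```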
    pub fn detach_and_log_err(self, cx: &mut AppContext) {
        cx.background_executor().spawn(self.log_err()).detach();
    }
}

impl<T> Future for Task<T> {
    type Output = T;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        // Safety: we never move pinned data out of place. `Ready` holds a plain
        // value and `async_task::Task` is `Unpin`.
        match unsafe { self.get_unchecked_mut() } {
            Task::Ready(val) => Poll::Ready(val.take().unwrap()),
            Task::Spawned(task) => task.poll(cx),
        }
    }
}

impl BackgroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self { dispatcher }
    }

    /// Enqueues the given future to be run to completion on a background thread,
    /// returning a `Task` handle that resolves to the future's output.
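    ///
    /// A minimal usage sketch (hedged: assumes an executor obtained from an
    /// `AppContext`, e.g. via `cx.background_executor()`; not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// let executor = cx.background_executor().clone();
    /// let task = executor.spawn(async move {
    ///     // Work that is `Send` and does not touch UI state goes here.
    ///     (1..=10).sum::<u32>()
    /// });
    /// // Await the task from other async code, block on it, or detach it.
    /// assert_eq!(executor.block(task), 55);
    /// ```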
    pub fn spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
    where
        R: Send + 'static,
    {
        let dispatcher = self.dispatcher.clone();
        let (runnable, task) =
            async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable));
        runnable.schedule();
        Task::Spawned(task)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(false, future)
    }

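    /// Blocks the current thread until the given future completes, running other
    /// dispatched work while it waits. A hedged sketch (assumes `executor` is a
    /// `BackgroundExecutor`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let answer = executor.block(async {
    ///     // Any future can be driven to completion here.
    ///     21 * 2
    /// });
    /// assert_eq!(answer, 42);
    /// ```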
    pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
        self.block_internal(true, future)
    }

    pub(crate) fn block_internal<R>(
        &self,
        background_only: bool,
        future: impl Future<Output = R>,
    ) -> R {
        pin_mut!(future);
        let (parker, unparker) = parking::pair();
        let awoken = Arc::new(AtomicBool::new(false));
        let awoken2 = awoken.clone();

        // Wake-ups set the flag and unpark the blocked thread.
        let waker = waker_fn(move || {
            awoken2.store(true, SeqCst);
            unparker.unpark();
        });
        let mut cx = std::task::Context::from_waker(&waker);

        loop {
            match future.as_mut().poll(&mut cx) {
                Poll::Ready(result) => return result,
                Poll::Pending => {
                    // Run other dispatched work; if there is none and we were not
                    // woken in the meantime, park until the waker fires.
                    if !self.dispatcher.poll(background_only) {
                        if awoken.swap(false, SeqCst) {
                            continue;
                        }

                        #[cfg(any(test, feature = "test-support"))]
                        if let Some(test) = self.dispatcher.as_test() {
                            if !test.parking_allowed() {
                                let mut backtrace_message = String::new();
                                if let Some(backtrace) = test.waiting_backtrace() {
                                    backtrace_message =
                                        format!("\nbacktrace of waiting future:\n{:?}", backtrace);
                                }
                                panic!("parked with nothing left to run{}", backtrace_message)
                            }
                        }
                        parker.park();
                    }
                }
            }
        }
    }

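    /// Blocks the current thread on the future for at most `duration`, returning
    /// `Ok(value)` if it completes in time or `Err(future)` so the caller can
    /// keep polling it later. A hedged sketch (`executor`, `some_future`, and
    /// `handle` are placeholders; not compiled as a doctest):
    ///
    /// ```ignore
    /// match executor.block_with_timeout(Duration::from_millis(50), some_future) {
    ///     Ok(value) => {
    ///         // The future finished within 50ms.
    ///         handle(value);
    ///     }
    ///     Err(unfinished) => {
    ///         // Not done yet; the caller still owns the future and may await it later.
    ///         let _ = unfinished;
    ///     }
    /// }
    /// ```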
    pub fn block_with_timeout<R>(
        &self,
        duration: Duration,
        future: impl Future<Output = R>,
    ) -> Result<R, impl Future<Output = R>> {
        let mut future = Box::pin(future.fuse());
        if duration.is_zero() {
            return Err(future);
        }

        let mut timer = self.timer(duration).fuse();
        let timeout = async {
            futures::select_biased! {
                value = future => Ok(value),
                _ = timer => Err(()),
            }
        };
        match self.block(timeout) {
            Ok(value) => Ok(value),
            Err(_) => Err(future),
        }
    }

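    /// Runs a set of borrowing futures on the background executor and awaits
    /// all of them before returning. A hedged sketch (assumes this is called
    /// from async code with a `BackgroundExecutor` in `executor`; not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// let mut counts = vec![0usize; 4];
    /// executor
    ///     .scoped(|scope| {
    ///         for count in counts.iter_mut() {
    ///             // Each future borrows a disjoint `&mut usize` from `counts`.
    ///             scope.spawn(async move { *count += 1 });
    ///         }
    ///     })
    ///     .await;
    /// assert!(counts.iter().all(|&n| n == 1));
    /// ```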
    pub async fn scoped<'scope, F>(&self, scheduler: F)
    where
        F: FnOnce(&mut Scope<'scope>),
    {
        let mut scope = Scope::new(self.clone());
        (scheduler)(&mut scope);
        let spawned = mem::take(&mut scope.futures)
            .into_iter()
            .map(|f| self.spawn(f))
            .collect::<Vec<_>>();
        for task in spawned {
            task.await;
        }
    }

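    /// Returns a task that completes after the given duration has elapsed on the
    /// platform dispatcher. A hedged sketch (assumes `executor` is a
    /// `BackgroundExecutor` available in async code; not compiled as a doctest):
    ///
    /// ```ignore
    /// executor.timer(Duration::from_millis(250)).await;
    /// // At least 250ms have passed (or a test dispatcher's clock was advanced).
    /// ```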
    pub fn timer(&self, duration: Duration) -> Task<()> {
        let (runnable, task) = async_task::spawn(async move {}, {
            let dispatcher = self.dispatcher.clone();
            move |runnable| dispatcher.dispatch_after(duration, runnable)
        });
        runnable.schedule();
        Task::Spawned(task)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn start_waiting(&self) {
        self.dispatcher.as_test().unwrap().start_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn finish_waiting(&self) {
        self.dispatcher.as_test().unwrap().finish_waiting();
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
        self.dispatcher.as_test().unwrap().simulate_random_delay()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn advance_clock(&self, duration: Duration) {
        self.dispatcher.as_test().unwrap().advance_clock(duration)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn run_until_parked(&self) {
        self.dispatcher.as_test().unwrap().run_until_parked()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn allow_parking(&self) {
        self.dispatcher.as_test().unwrap().allow_parking();
    }

    pub fn num_cpus(&self) -> usize {
        num_cpus::get()
    }

    pub fn is_main_thread(&self) -> bool {
        self.dispatcher.is_main_thread()
    }
}

impl ForegroundExecutor {
    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
        Self {
            dispatcher,
            not_send: PhantomData,
        }
    }

    /// Enqueues the given future to be run to completion on the main thread,
    /// returning a `Task` handle that resolves to the future's output. Unlike
    /// `BackgroundExecutor::spawn`, the future does not need to be `Send`.
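    ///
    /// A minimal usage sketch (hedged: assumes a `ForegroundExecutor` obtained
    /// from an `AppContext`, e.g. via `cx.foreground_executor()`; not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// let ui_value = std::rc::Rc::new(std::cell::Cell::new(0));
    /// let task = cx.foreground_executor().spawn({
    ///     let ui_value = ui_value.clone();
    ///     async move {
    ///         // `Rc` is `!Send`, which is fine here: this runs on the main thread.
    ///         ui_value.set(1);
    ///     }
    /// });
    /// task.detach();
    /// ```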
    pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
    where
        R: 'static,
    {
        let dispatcher = self.dispatcher.clone();
        let (runnable, task) = async_task::spawn_local(future, move |runnable| {
            dispatcher.dispatch_on_main_thread(runnable)
        });
        runnable.schedule();
        Task::Spawned(task)
    }
}

pub struct Scope<'a> {
    executor: BackgroundExecutor,
    futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
    tx: Option<mpsc::Sender<()>>,
    rx: mpsc::Receiver<()>,
    lifetime: PhantomData<&'a ()>,
}

impl<'a> Scope<'a> {
    fn new(executor: BackgroundExecutor) -> Self {
        let (tx, rx) = mpsc::channel(1);
        Self {
            executor,
            tx: Some(tx),
            rx,
            futures: Default::default(),
            lifetime: PhantomData,
        }
    }

    pub fn spawn<F>(&mut self, f: F)
    where
        F: Future<Output = ()> + Send + 'a,
    {
        let tx = self.tx.clone().unwrap();

        // Safety: these futures may borrow data that only lives for `'a`, but
        // `Scope`'s `Drop` impl blocks until every spawned future has resolved,
        // so the `'static` transmute never lets a future outlive its borrows.
        let f = unsafe {
            mem::transmute::<
                Pin<Box<dyn Future<Output = ()> + Send + 'a>>,
                Pin<Box<dyn Future<Output = ()> + Send + 'static>>,
            >(Box::pin(async move {
                f.await;
                drop(tx);
            }))
        };
        self.futures.push(f);
    }
}

impl<'a> Drop for Scope<'a> {
    fn drop(&mut self) {
        // Drop our copy of the sender; each spawned future holds a clone and
        // drops it on completion.
        self.tx.take().unwrap();

        // Wait until the channel is closed, which means that all of the spawned
        // futures have resolved.
        self.executor.block(self.rx.next());
    }
}