
Adds partial_cmp.rs file and partial_cmp signature to mod.rs #268


Closed
wants to merge 14 commits into from

159 changes: 42 additions & 117 deletions src/stream/stream/mod.rs
@@ -30,19 +30,17 @@ mod filter_map;
mod find;
mod find_map;
mod fold;
mod for_each;
mod fuse;
mod inspect;
mod map;
mod min_by;
mod next;
mod nth;
mod partial_cmp;
mod scan;
mod skip;
mod skip_while;
mod step_by;
mod take;
mod try_for_each;
mod zip;

use all::AllFuture;
@@ -52,17 +50,15 @@ use filter_map::FilterMap;
use find::FindFuture;
use find_map::FindMapFuture;
use fold::FoldFuture;
use for_each::ForEachFuture;
use min_by::MinByFuture;
use next::NextFuture;
use nth::NthFuture;
use try_for_each::TryForEeachFuture;
use partial_cmp::PartialCmpFuture;

pub use chain::Chain;
pub use filter::Filter;
pub use fuse::Fuse;
pub use inspect::Inspect;
pub use map::Map;
pub use scan::Scan;
pub use skip::Skip;
pub use skip_while::SkipWhile;
@@ -344,37 +340,6 @@ extension_trait! {
Enumerate::new(self)
}

#[doc = r#"
Takes a closure and creates a stream that calls that closure on every element of this stream.

# Examples

```
# fn main() { async_std::task::block_on(async {
#
use async_std::prelude::*;
use std::collections::VecDeque;

let s: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
let mut s = s.map(|x| 2 * x);

assert_eq!(s.next().await, Some(2));
assert_eq!(s.next().await, Some(4));
assert_eq!(s.next().await, Some(6));
assert_eq!(s.next().await, None);

#
# }) }
```
"#]
fn map<B, F>(self, f: F) -> Map<Self, F, Self::Item, B>
where
Self: Sized,
F: FnMut(Self::Item) -> B,
{
Map::new(self, f)
}

#[doc = r#"
A combinator that does something with each element in the stream, passing the value
on.
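The usage example for this combinator is folded out of the diff view here. A minimal sketch of how `inspect` is typically chained (assuming it mirrors `Iterator::inspect`, combined with the stream `fold` that appears elsewhere in this file) might look like:

```
use async_std::prelude::*;
use std::collections::VecDeque;

fn main() {
    async_std::task::block_on(async {
        let s: VecDeque<i32> = vec![1, 2, 3].into_iter().collect();

        // `inspect` only observes each item and passes it along unchanged.
        let sum = s
            .inspect(|x| println!("about to add: {}", x))
            .fold(0, |acc, x| acc + x)
            .await;

        assert_eq!(sum, 6);
    })
}
```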
@@ -791,41 +756,6 @@ extension_trait! {
FoldFuture::new(self, init, f)
}

#[doc = r#"
Calls a closure on each element of the stream.

# Examples

```
# fn main() { async_std::task::block_on(async {
#
use async_std::prelude::*;
use std::collections::VecDeque;
use std::sync::mpsc::channel;

let (tx, rx) = channel();

let s: VecDeque<usize> = vec![1, 2, 3].into_iter().collect();
let sum = s.for_each(move |x| tx.clone().send(x).unwrap()).await;

let v: Vec<_> = rx.iter().collect();

assert_eq!(v, vec![1, 2, 3]);
#
# }) }
```
"#]
fn for_each<F>(
self,
f: F,
) -> impl Future<Output = ()> [ForEachFuture<Self, F, Self::Item>]
where
Self: Sized,
F: FnMut(Self::Item),
{
ForEachFuture::new(self, f)
}

#[doc = r#"
Tests if any element of the stream matches a predicate.
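The example for `any` is likewise collapsed in this view. A short usage sketch (assuming `any` takes the stream by mutable reference and a plain predicate, mirroring `Iterator::any`) could be:

```
use async_std::prelude::*;
use std::collections::VecDeque;

fn main() {
    async_std::task::block_on(async {
        let mut s: VecDeque<u32> = vec![1, 2, 3].into_iter().collect();

        // Resolves to true as soon as one element matches the predicate.
        let found = s.any(|x| x > 2).await;
        assert!(found);
    })
}
```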

@@ -997,51 +927,6 @@ extension_trait! {
Skip::new(self, n)
}

#[doc = r#"
Applies a fallible function to each element in a stream, stopping at the first error and returning it.

# Examples

```
# fn main() { async_std::task::block_on(async {
#
use std::collections::VecDeque;
use std::sync::mpsc::channel;
use async_std::prelude::*;

let (tx, rx) = channel();

let s: VecDeque<usize> = vec![1, 2, 3].into_iter().collect();
let s = s.try_for_each(|v| {
if v % 2 == 1 {
tx.clone().send(v).unwrap();
Ok(())
} else {
Err("even")
}
});

let res = s.await;
drop(tx);
let values: Vec<_> = rx.iter().collect();

assert_eq!(values, vec![1]);
assert_eq!(res, Err("even"));
#
# }) }
```
"#]
fn try_for_each<F, E>(
self,
f: F,
) -> impl Future<Output = E> [TryForEeachFuture<Self, F, Self::Item, E>]
where
Self: Sized,
F: FnMut(Self::Item) -> Result<(), E>,
{
TryForEeachFuture::new(self, f)
}

#[doc = r#"
'Zips up' two streams into a single stream of pairs.
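The example here is also folded away. A usage sketch (assuming the stream version mirrors `Iterator::zip`) could be:

```
use async_std::prelude::*;
use std::collections::VecDeque;

fn main() {
    async_std::task::block_on(async {
        let l: VecDeque<isize> = vec![1, 2, 3].into_iter().collect();
        let r: VecDeque<isize> = vec![4, 5, 6].into_iter().collect();

        // Pairs up items until either stream runs out.
        let mut s = l.zip(r);
        assert_eq!(s.next().await, Some((1, 4)));
        assert_eq!(s.next().await, Some((2, 5)));
        assert_eq!(s.next().await, Some((3, 6)));
        assert_eq!(s.next().await, None);
    })
}
```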

@@ -1187,6 +1072,46 @@
{
Merge::new(self, other)
}

#[doc = r#"
Lexicographically compares the elements of this `Stream` with those
of another.

# Examples
```
# fn main() { async_std::task::block_on(async {
#
use async_std::prelude::*;
use std::collections::VecDeque;

use std::cmp::Ordering;

let s1 = VecDeque::from(vec![1]);
let s2 = VecDeque::from(vec![1, 2]);
let s3 = VecDeque::from(vec![1, 2, 3]);
let s4 = VecDeque::from(vec![1, 2, 4]);

Member

Probably also add tests for Greater and Less for streams with the same number of elements but different values?

assert_eq!(s1.clone().partial_cmp(s1.clone()).await, Some(Ordering::Equal));
assert_eq!(s1.clone().partial_cmp(s2.clone()).await, Some(Ordering::Less));
assert_eq!(s2.clone().partial_cmp(s1.clone()).await, Some(Ordering::Greater));
assert_eq!(s3.clone().partial_cmp(s4.clone()).await, Some(Ordering::Less));
assert_eq!(s4.clone().partial_cmp(s3.clone()).await, Some(Ordering::Greater));

#
# }) }
```
"#]
fn partial_cmp<S>(
self,
other: S
) -> impl Future<Output = Option<Ordering>> [PartialCmpFuture<Self, S>]
where
Self: Sized + Stream,
S: Stream,
<Self as Stream>::Item: PartialOrd<S::Item>,
{
PartialCmpFuture::new(self, other)
}
}

impl<S: Stream + Unpin + ?Sized> Stream for Box<S> {
92 changes: 92 additions & 0 deletions src/stream/stream/partial_cmp.rs
@@ -0,0 +1,92 @@
use std::cmp::Ordering;
use std::pin::Pin;

use super::fuse::Fuse;
use crate::future::Future;
use crate::prelude::*;
use crate::stream::Stream;
use crate::task::{Context, Poll};

// Lexicographically compares the elements of this `Stream` with those
// of another.
#[doc(hidden)]
#[allow(missing_debug_implementations)]
pub struct PartialCmpFuture<L: Stream, R: Stream> {
l: Fuse<L>,
r: Fuse<R>,
l_cache: Option<L::Item>,
r_cache: Option<R::Item>,
}

impl<L: Stream, R: Stream> PartialCmpFuture<L, R> {
pin_utils::unsafe_pinned!(l: Fuse<L>);
pin_utils::unsafe_pinned!(r: Fuse<R>);
pin_utils::unsafe_unpinned!(l_cache: Option<L::Item>);
pin_utils::unsafe_unpinned!(r_cache: Option<R::Item>);

pub(super) fn new(l: L, r: R) -> Self {
PartialCmpFuture {
l: l.fuse(),
r: r.fuse(),
l_cache: None,
r_cache: None,
}
}
}

impl<L: Stream, R: Stream> Future for PartialCmpFuture<L, R>
where
L: Stream + Sized,
R: Stream + Sized,
L::Item: PartialOrd<R::Item>,
{
type Output = Option<Ordering>;

fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Member

Uhhh, this doesn't look right. Values might arrive from the streams at different points in time, but if that happens it doesn't mean that the streams are not equal. The more correct behaviour here would probably be as follows:

  1. We cache both elements we are currently comparing in the state of the PartialCmpFuture, in Options.
  2. As soon as both elements have been received (both Options are .is_some()), we can compare them.
  3. We have to consider all the edge cases, i.e.:
    a) we have l_next and l has closed, but we don't have r_next and r is not yet closed
    b) the same for r_next
    c) both streams are closed but we have only one element left in the cache
    d) and so on

Contributor Author

I see. I wanted to start by replicating the std::iter code for partial_cmp as much as possible, but you're correct that it doesn't account for values from each stream arriving at different times. Thanks for the feedback.

Member
@montekki, Oct 1, 2019

I also think that `fuse`ing both streams would help.
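To make the edge cases discussed above concrete, here is a rough sketch of that decision logic written as a plain function over the cached state. The name `step` and the free-function shape are illustrative only, not async-std API; the `l_done`/`r_done` flags stand in for the `done` flag that `Fuse` exposes, and the `loop` that follows in the diff is the PR's actual implementation of the same idea.

```
use std::cmp::Ordering;

// One "step" of the comparison, given the fused streams' completion flags and
// the cached items. Returns Some(final_answer) once the comparison can be
// decided, or None when more items have to be polled first.
fn step<L, R>(
    l_done: bool,
    r_done: bool,
    l_cache: &mut Option<L>,
    r_cache: &mut Option<R>,
) -> Option<Option<Ordering>>
where
    L: PartialOrd<R>,
{
    match (l_cache.take(), r_cache.take()) {
        // Both items available: the first non-equal pair decides the result.
        (Some(l), Some(r)) => match l.partial_cmp(&r) {
            Some(Ordering::Equal) => None, // equal so far; keep polling
            other => Some(other),
        },
        // Both streams exhausted with nothing cached: the streams are equal.
        (None, None) if l_done && r_done => Some(Some(Ordering::Equal)),
        // Left side is exhausted while the right side still has an item,
        // so the left stream is a strict prefix of the right one.
        (None, Some(_)) if l_done => Some(Some(Ordering::Less)),
        // Mirror image of the previous arm.
        (Some(_), None) if r_done => Some(Some(Ordering::Greater)),
        // Covers cases like (a)/(b) above: one side is still undecided,
        // so put the items back and wait for more polling.
        (l, r) => {
            *l_cache = l;
            *r_cache = r;
            None
        }
    }
}

fn main() {
    let (mut l, mut r) = (Some(1), Some(2));
    assert_eq!(step(false, false, &mut l, &mut r), Some(Some(Ordering::Less)));
}
```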

loop {
// Short-circuit logic:
// the stream that completes earliest can be considered Less, etc.
let l_complete = self.l.done && self.as_mut().l_cache.is_none();
let r_complete = self.r.done && self.as_mut().r_cache.is_none();

if l_complete && r_complete {
return Poll::Ready(Some(Ordering::Equal));
} else if l_complete {
return Poll::Ready(Some(Ordering::Less));
} else if r_complete {
return Poll::Ready(Some(Ordering::Greater));
}

// Get the next value if possible and necessary
if !self.l.done && self.as_mut().l_cache.is_none() {
let l_next = futures_core::ready!(self.as_mut().l().poll_next(cx));
if let Some(item) = l_next {
*self.as_mut().l_cache() = Some(item);
}
}

if !self.r.done && self.as_mut().r_cache.is_none() {
let r_next = futures_core::ready!(self.as_mut().r().poll_next(cx));
if let Some(item) = r_next {
*self.as_mut().r_cache() = Some(item);
}
}

// Compare if both values are available.
if self.as_mut().l_cache.is_some() && self.as_mut().r_cache.is_some() {
let l_value = self.as_mut().l_cache().take().unwrap();
let r_value = self.as_mut().r_cache().take().unwrap();
let result = l_value.partial_cmp(&r_value);

if let Some(Ordering::Equal) = result {
// Reset cache to prepare for next comparison
*self.as_mut().l_cache() = None;
*self.as_mut().r_cache() = None;
} else {
// Return non equal value
return Poll::Ready(result);
}
}
}
}
}
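Finally, a small end-to-end usage sketch of the new combinator (assuming the method lands in async_std::prelude as written above, and relying on VecDeque implementing Stream, as in the doc examples). The second comparison shows why the future resolves to Option<Ordering>: an incomparable pair such as NaN yields None, mirroring Iterator::partial_cmp.

```
use async_std::prelude::*;
use async_std::task;
use std::cmp::Ordering;
use std::collections::VecDeque;

fn main() {
    task::block_on(async {
        // Ordinary comparable items behave like Iterator::partial_cmp.
        let a: VecDeque<i32> = vec![1, 2, 3].into_iter().collect();
        let b: VecDeque<i32> = vec![1, 3].into_iter().collect();
        assert_eq!(a.partial_cmp(b).await, Some(Ordering::Less));

        // An incomparable pair (NaN) surfaces as None, which is why the
        // output type is Option<Ordering> rather than Ordering.
        let x: VecDeque<f64> = vec![1.0, std::f64::NAN].into_iter().collect();
        let y: VecDeque<f64> = vec![1.0, 2.0].into_iter().collect();
        assert_eq!(x.partial_cmp(y).await, None);
    })
}
```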