diff --git a/container/src/columnation.rs b/container/src/columnation.rs
index 460640ad3..2f3c3223c 100644
--- a/container/src/columnation.rs
+++ b/container/src/columnation.rs
@@ -339,7 +339,7 @@ mod container {
 
     use crate::columnation::{Columnation, TimelyStack};
 
-    impl<T: Columnation + 'static> Container for TimelyStack<T> {
+    impl<T: Columnation> Container for TimelyStack<T> {
         type ItemRef<'a> = &'a T where Self: 'a;
         type Item<'a> = &'a T where Self: 'a;
 
@@ -355,20 +355,20 @@ mod container {
             TimelyStack::clear(self)
         }
 
-        type Iter<'a> = std::slice::Iter<'a, T>;
+        type Iter<'a> = std::slice::Iter<'a, T> where Self: 'a;
 
         fn iter(&self) -> Self::Iter<'_> {
             self.deref().iter()
         }
 
-        type DrainIter<'a> = std::slice::Iter<'a, T>;
+        type DrainIter<'a> = std::slice::Iter<'a, T> where Self: 'a;
 
         fn drain(&mut self) -> Self::DrainIter<'_> {
             (*self).iter()
         }
     }
 
-    impl<T: Columnation + 'static> SizableContainer for TimelyStack<T> {
+    impl<T: Columnation> SizableContainer for TimelyStack<T> {
         fn capacity(&self) -> usize {
             self.capacity()
         }
diff --git a/container/src/flatcontainer.rs b/container/src/flatcontainer.rs
index 90855a1f0..295cb270b 100644
--- a/container/src/flatcontainer.rs
+++ b/container/src/flatcontainer.rs
@@ -3,7 +3,7 @@
 pub use flatcontainer::*;
 use crate::{buffer, Container, SizableContainer, PushInto};
 
-impl<R: Region + Clone + 'static> Container for FlatStack<R> {
+impl<R: Region> Container for FlatStack<R> {
     type ItemRef<'a> = R::ReadItem<'a> where Self: 'a;
     type Item<'a> = R::ReadItem<'a> where Self: 'a;
 
@@ -15,20 +15,20 @@ impl<R: Region + Clone + 'static> Container for FlatStack<R> {
         self.clear()
     }
 
-    type Iter<'a> = <&'a Self as IntoIterator>::IntoIter;
+    type Iter<'a> = <&'a Self as IntoIterator>::IntoIter where Self: 'a;
 
     fn iter(&self) -> Self::Iter<'_> {
         IntoIterator::into_iter(self)
     }
 
-    type DrainIter<'a> = Self::Iter<'a>;
+    type DrainIter<'a> = Self::Iter<'a> where Self: 'a;
 
     fn drain(&mut self) -> Self::DrainIter<'_> {
         IntoIterator::into_iter(&*self)
     }
 }
 
-impl<R: Region + Clone + 'static> SizableContainer for FlatStack<R> {
+impl<R: Region> SizableContainer for FlatStack<R> {
     fn capacity(&self) -> usize {
         self.capacity()
     }
diff --git a/container/src/lib.rs b/container/src/lib.rs
index e22b2471a..a42abe888 100644
--- a/container/src/lib.rs
+++ b/container/src/lib.rs
@@ -18,8 +18,7 @@ pub mod flatcontainer;
 /// We require the container to be cloneable to enable efficient copies when providing references
 /// of containers to operators. Care must be taken that the type's `clone_from` implementation
 /// is efficient (which is not necessarily the case when deriving `Clone`.)
-/// TODO: Don't require `Container: Clone`
-pub trait Container: Default + Clone + 'static {
+pub trait Container: Default {
     /// The type of elements when reading non-destructively from the container.
     type ItemRef<'a> where Self: 'a;
 
@@ -50,13 +49,13 @@ pub trait Container: Default + Clone + 'static {
     fn clear(&mut self);
 
     /// Iterator type when reading from the container.
-    type Iter<'a>: Iterator<Item=Self::ItemRef<'a>>;
+    type Iter<'a>: Iterator<Item=Self::ItemRef<'a>> where Self: 'a;
 
     /// Returns an iterator that reads the contents of this container.
     fn iter(&self) -> Self::Iter<'_>;
 
     /// Iterator type when draining the container.
-    type DrainIter<'a>: Iterator<Item=Self::Item<'a>>;
+    type DrainIter<'a>: Iterator<Item=Self::Item<'a>> where Self: 'a;
 
     /// Returns an iterator that drains the contents of this container.
     /// Drain leaves the container in an undefined state.
@@ -104,7 +103,7 @@ pub trait PushInto<T> {
 /// decide to represent a push order for `extract` and `finish`, or not.
 pub trait ContainerBuilder: Default + 'static {
     /// The container type we're building.
-    type Container: Container;
+    type Container: Container + Clone + 'static;
     /// Extract assembled containers, potentially leaving unfinished data behind. Can
    /// be called repeatedly, for example while the caller can send data.
     ///
@@ -160,7 +159,7 @@ impl<T, C: SizableContainer + PushInto<T>> PushInto<T> for CapacityContainerBuil
     }
 }
 
-impl<C: Container> ContainerBuilder for CapacityContainerBuilder<C> {
+impl<C: Container + Clone + 'static> ContainerBuilder for CapacityContainerBuilder<C> {
     type Container = C;
 
     #[inline]
@@ -204,7 +203,7 @@ impl<C: Container> CapacityContainerBuilder<C> {
     }
 }
 
-impl<T: Clone + 'static> Container for Vec<T> {
+impl<T> Container for Vec<T> {
     type ItemRef<'a> = &'a T where T: 'a;
     type Item<'a> = T where T: 'a;
 
@@ -218,20 +217,20 @@ impl<T: Clone + 'static> Container for Vec<T> {
     fn clear(&mut self) { Vec::clear(self) }
 
-    type Iter<'a> = std::slice::Iter<'a, T>;
+    type Iter<'a> = std::slice::Iter<'a, T> where Self: 'a;
 
     fn iter(&self) -> Self::Iter<'_> {
         self.as_slice().iter()
     }
 
-    type DrainIter<'a> = std::vec::Drain<'a, T>;
+    type DrainIter<'a> = std::vec::Drain<'a, T> where Self: 'a;
 
     fn drain(&mut self) -> Self::DrainIter<'_> {
         self.drain(..)
     }
 }
 
-impl<T: Clone + 'static> SizableContainer for Vec<T> {
+impl<T> SizableContainer for Vec<T> {
     fn capacity(&self) -> usize {
         self.capacity()
     }
@@ -294,13 +293,13 @@ mod rc {
             }
         }
 
-        type Iter<'a> = T::Iter<'a>;
+        type Iter<'a> = T::Iter<'a> where Self: 'a;
 
         fn iter(&self) -> Self::Iter<'_> {
             self.deref().iter()
         }
 
-        type DrainIter<'a> = T::Iter<'a>;
+        type DrainIter<'a> = T::Iter<'a> where Self: 'a;
 
         fn drain(&mut self) -> Self::DrainIter<'_> {
             self.iter()
@@ -335,13 +334,13 @@ mod arc {
             }
         }
 
-        type Iter<'a> = T::Iter<'a>;
+        type Iter<'a> = T::Iter<'a> where Self: 'a;
 
         fn iter(&self) -> Self::Iter<'_> {
             self.deref().iter()
         }
 
-        type DrainIter<'a> = T::Iter<'a>;
+        type DrainIter<'a> = T::Iter<'a> where Self: 'a;
 
         fn drain(&mut self) -> Self::DrainIter<'_> {
             self.iter()
diff --git a/timely/src/dataflow/channels/pact.rs b/timely/src/dataflow/channels/pact.rs
index a744ab078..8ef59598c 100644
--- a/timely/src/dataflow/channels/pact.rs
+++ b/timely/src/dataflow/channels/pact.rs
@@ -35,7 +35,7 @@ pub trait ParallelizationContract<T, C> {
 #[derive(Debug)]
 pub struct Pipeline;
 
-impl<T: Timestamp, C: Container> ParallelizationContract<T, C> for Pipeline {
+impl<T: Timestamp, C: Container + Data> ParallelizationContract<T, C> for Pipeline {
     type Pusher = LogPusher<T, C, ThreadPusher<Message<T, C>>>;
     type Puller = LogPuller<T, C, ThreadPuller<Message<T, C>>>;
     fn connect<A: AsWorker>(self, allocator: &mut A, identifier: usize, address: Rc<[usize]>, logging: Option<Logger>) -> (Self::Pusher, Self::Puller) {
diff --git a/timely/src/dataflow/channels/pushers/buffer.rs b/timely/src/dataflow/channels/pushers/buffer.rs
index 48feb14ab..4ea872fb7 100644
--- a/timely/src/dataflow/channels/pushers/buffer.rs
+++ b/timely/src/dataflow/channels/pushers/buffer.rs
@@ -6,7 +6,7 @@ use crate::container::{ContainerBuilder, CapacityContainerBuilder, PushInto};
 use crate::dataflow::channels::Message;
 use crate::dataflow::operators::Capability;
 use crate::progress::Timestamp;
-use crate::Container;
+use crate::{Container, Data};
 
 /// Buffers data sent at the same time, for efficient communication.
 ///
@@ -44,7 +44,7 @@ impl<T, CB, P> Buffer<T, CB, P> {
     }
 }
 
-impl<T, C: Container, P: Push<Message<T, C>>> Buffer<T, CapacityContainerBuilder<C>, P> where T: Eq+Clone {
+impl<T, C: Container + Data, P: Push<Message<T, C>>> Buffer<T, CapacityContainerBuilder<C>, P> where T: Eq+Clone {
     /// Returns a `Session`, which accepts data to send at the associated time
     #[inline]
     pub fn session(&mut self, time: &T) -> Session<T, CapacityContainerBuilder<C>, P> {
@@ -133,7 +133,7 @@ pub struct Session<'a, T, CB, P> {
     buffer: &'a mut Buffer<T, CB, P>,
 }
 
-impl<'a, T, C: Container, P> Session<'a, T, CapacityContainerBuilder<C>, P>
+impl<'a, T, C: Container + Data, P> Session<'a, T, CapacityContainerBuilder<C>, P>
 where
     T: Eq + Clone + 'a,
     P: Push<Message<T, C>> + 'a,
diff --git a/timely/src/dataflow/channels/pushers/tee.rs b/timely/src/dataflow/channels/pushers/tee.rs
index 8b7a4b906..e9251c24a 100644
--- a/timely/src/dataflow/channels/pushers/tee.rs
+++ b/timely/src/dataflow/channels/pushers/tee.rs
@@ -17,7 +17,7 @@ pub struct Tee<T, C> {
     shared: PushList<T, C>,
 }
 
-impl<T, C: Container> Push<Message<T, C>> for Tee<T, C> {
+impl<T, C: Container + Data> Push<Message<T, C>> for Tee<T, C> {
     #[inline]
     fn push(&mut self, message: &mut Option<Message<T, C>>) {
         let mut pushers = self.shared.borrow_mut();
diff --git a/timely/src/dataflow/operators/branch.rs b/timely/src/dataflow/operators/branch.rs
index 8b4ee8674..c3865cb2b 100644
--- a/timely/src/dataflow/operators/branch.rs
+++ b/timely/src/dataflow/operators/branch.rs
@@ -92,7 +92,7 @@ pub trait BranchWhen<T>: Sized {
     fn branch_when(&self, condition: impl Fn(&T) -> bool + 'static) -> (Self, Self);
 }
 
-impl<S: Scope, C: Container> BranchWhen<S::Timestamp> for StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> BranchWhen<S::Timestamp> for StreamCore<S, C> {
     fn branch_when(&self, condition: impl Fn(&S::Timestamp) -> bool + 'static) -> (Self, Self) {
         let mut builder = OperatorBuilder::new("Branch".to_owned(), self.scope());
 
diff --git a/timely/src/dataflow/operators/core/capture/capture.rs b/timely/src/dataflow/operators/core/capture/capture.rs
index 28df65208..82bef229a 100644
--- a/timely/src/dataflow/operators/core/capture/capture.rs
+++ b/timely/src/dataflow/operators/core/capture/capture.rs
@@ -10,14 +10,14 @@ use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::channels::pullers::Counter as PullCounter;
 use crate::dataflow::operators::generic::builder_raw::OperatorBuilder;
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::progress::ChangeBatch;
 use crate::progress::Timestamp;
 
 use super::{Event, EventPusher};
 
 /// Capture a stream of timestamped data for later replay.
-pub trait Capture<T: Timestamp, C: Container> {
+pub trait Capture<T: Timestamp, C: Container + Data> {
     /// Captures a stream of timestamped data for later replay.
     ///
     /// # Examples
@@ -113,7 +113,7 @@ pub trait Capture<T: Timestamp, C: Container> {
     }
 }
 
-impl<S: Scope, C: Container> Capture<S::Timestamp, C> for StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> Capture<S::Timestamp, C> for StreamCore<S, C> {
     fn capture_into<P: EventPusher<S::Timestamp, C>+'static>(&self, mut event_pusher: P) {
 
         let mut builder = OperatorBuilder::new("Capture".to_owned(), self.scope());
diff --git a/timely/src/dataflow/operators/core/capture/replay.rs b/timely/src/dataflow/operators/core/capture/replay.rs
index 1c101f0dd..74a94c36f 100644
--- a/timely/src/dataflow/operators/core/capture/replay.rs
+++ b/timely/src/dataflow/operators/core/capture/replay.rs
@@ -62,7 +62,7 @@ pub trait Replay<T: Timestamp, C> : Sized {
     fn replay_core<S: Scope<Timestamp=T>>(self, scope: &mut S, period: Option<std::time::Duration>) -> StreamCore<S, C>;
 }
 
-impl<T: Timestamp, C: Container, I> Replay<T, C> for I
+impl<T: Timestamp, C: Container + Data, I> Replay<T, C> for I
 where
     I : IntoIterator,
     <I as IntoIterator>::Item: EventIterator<T, C>+'static,
diff --git a/timely/src/dataflow/operators/core/concat.rs b/timely/src/dataflow/operators/core/concat.rs
index ac191be17..89848f689 100644
--- a/timely/src/dataflow/operators/core/concat.rs
+++ b/timely/src/dataflow/operators/core/concat.rs
@@ -1,7 +1,7 @@
 //! Merges the contents of multiple streams.
 
-use crate::Container;
+use crate::{Container, Data};
 
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::{StreamCore, Scope};
@@ -23,7 +23,7 @@ pub trait Concat<G: Scope, C: Container> {
     fn concat(&self, _: &StreamCore<G, C>) -> StreamCore<G, C>;
 }
 
-impl<G: Scope, C: Container> Concat<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> Concat<G, C> for StreamCore<G, C> {
     fn concat(&self, other: &StreamCore<G, C>) -> StreamCore<G, C> {
         self.scope().concatenate([self.clone(), other.clone()])
     }
@@ -52,7 +52,7 @@ pub trait Concatenate<G: Scope, C: Container> {
         I: IntoIterator<Item=StreamCore<G, C>>;
 }
 
-impl<G: Scope, C: Container> Concatenate<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> Concatenate<G, C> for StreamCore<G, C> {
     fn concatenate<I>(&self, sources: I) -> StreamCore<G, C>
     where
         I: IntoIterator<Item=StreamCore<G, C>>
@@ -62,7 +62,7 @@ impl<G: Scope, C: Container> Concatenate<G, C> for StreamCore<G, C> {
     }
 }
 
-impl<G: Scope, C: Container> Concatenate<G, C> for G {
+impl<G: Scope, C: Container + Data> Concatenate<G, C> for G {
     fn concatenate<I>(&self, sources: I) -> StreamCore<G, C>
     where
         I: IntoIterator<Item=StreamCore<G, C>>
diff --git a/timely/src/dataflow/operators/core/enterleave.rs b/timely/src/dataflow/operators/core/enterleave.rs
index 9ecf90c5e..16464a7f6 100644
--- a/timely/src/dataflow/operators/core/enterleave.rs
+++ b/timely/src/dataflow/operators/core/enterleave.rs
@@ -103,7 +103,7 @@ pub trait Leave<G: Scope, C: Container> {
     fn leave(&self) -> StreamCore<G, C>;
 }
 
-impl<G: Scope, C: Container, T: Timestamp+Refines<G::Timestamp>> Leave<G, C> for StreamCore<Child<'_, G, T>, C> {
+impl<G: Scope, C: Container + Data, T: Timestamp+Refines<G::Timestamp>> Leave<G, C> for StreamCore<Child<'_, G, T>, C> {
     fn leave(&self) -> StreamCore<G, C> {
 
         let scope = self.scope();
@@ -130,14 +130,14 @@ impl<G: Scope, C: Container, T: Timestamp+Refines<G::Timestamp>> Leave<G, C
     }
 }
 
-struct IngressNub<TOuter: Timestamp, TInner: Timestamp+Refines<TOuter>, TContainer: Container> {
+struct IngressNub<TOuter: Timestamp, TInner: Timestamp+Refines<TOuter>, TContainer: Container + Data> {
     targets: Counter<TInner, TContainer, Tee<TInner, TContainer>>,
     phantom: ::std::marker::PhantomData<TOuter>,
     activator: crate::scheduling::Activator,
     active: bool,
 }
 
-impl<TOuter: Timestamp, TInner: Timestamp+Refines<TOuter>, TContainer: Container> Push<Message<TOuter, TContainer>> for IngressNub<TOuter, TInner, TContainer> {
+impl<TOuter: Timestamp, TInner: Timestamp+Refines<TOuter>, TContainer: Container + Data> Push<Message<TOuter, TContainer>> for IngressNub<TOuter, TInner, TContainer> {
     fn push(&mut self, element: &mut Option<Message<TOuter, TContainer>>) {
         if let Some(outer_message) = element {
             let data = ::std::mem::take(&mut outer_message.data);
diff --git a/timely/src/dataflow/operators/core/feedback.rs b/timely/src/dataflow/operators/core/feedback.rs
index b0e131dd3..e008b3029 100644
--- a/timely/src/dataflow/operators/core/feedback.rs
+++ b/timely/src/dataflow/operators/core/feedback.rs
@@ -1,6 +1,6 @@
 //! Create cycles in a timely dataflow graph.
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::container::CapacityContainerBuilder;
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::channels::pushers::Tee;
@@ -36,7 +36,7 @@ pub trait Feedback<G: Scope> {
     /// .connect_loop(handle);
     /// });
     /// ```
-    fn feedback<C: Container>(&mut self, summary: <G::Timestamp as Timestamp>::Summary) -> (Handle<G, C>, StreamCore<G, C>);
+    fn feedback<C: Container + Data>(&mut self, summary: <G::Timestamp as Timestamp>::Summary) -> (Handle<G, C>, StreamCore<G, C>);
 }
 
 /// Creates a `StreamCore` and a `Handle` to later bind the source of that `StreamCore`.
@@ -64,12 +64,12 @@ pub trait LoopVariable<'a, G: Scope, T: Timestamp> {
     /// });
     /// });
     /// ```
-    fn loop_variable<C: Container>(&mut self, summary: T::Summary) -> (Handle<Iterative<'a, G, T>, C>, StreamCore<Iterative<'a, G, T>, C>);
+    fn loop_variable<C: Container + Data>(&mut self, summary: T::Summary) -> (Handle<Iterative<'a, G, T>, C>, StreamCore<Iterative<'a, G, T>, C>);
 }
 
 impl<G: Scope> Feedback<G> for G {
 
-    fn feedback<C: Container>(&mut self, summary: <G::Timestamp as Timestamp>::Summary) -> (Handle<G, C>, StreamCore<G, C>) {
+    fn feedback<C: Container + Data>(&mut self, summary: <G::Timestamp as Timestamp>::Summary) -> (Handle<G, C>, StreamCore<G, C>) {
 
         let mut builder = OperatorBuilder::new("Feedback".to_owned(), self.clone());
         let (output, stream) = builder.new_output();
@@ -79,13 +79,13 @@ impl<G: Scope> Feedback<G> for G {
 }
 
 impl<'a, G: Scope, T: Timestamp> LoopVariable<'a, G, T> for Iterative<'a, G, T> {
-    fn loop_variable<C: Container>(&mut self, summary: T::Summary) -> (Handle<Iterative<'a, G, T>, C>, StreamCore<Iterative<'a, G, T>, C>) {
+    fn loop_variable<C: Container + Data>(&mut self, summary: T::Summary) -> (Handle<Iterative<'a, G, T>, C>, StreamCore<Iterative<'a, G, T>, C>) {
         self.feedback(Product::new(Default::default(), summary))
     }
 }
 
 /// Connect a `Stream` to the input of a loop variable.
-pub trait ConnectLoop<G: Scope, C: Container> {
+pub trait ConnectLoop<G: Scope, C: Container + Data> {
     /// Connect a `Stream` to be the input of a loop variable.
     ///
     /// # Examples
@@ -106,7 +106,7 @@ pub trait ConnectLoop<G: Scope, C: Container> {
     fn connect_loop(&self, handle: Handle<G, C>);
 }
 
-impl<G: Scope, C: Container> ConnectLoop<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> ConnectLoop<G, C> for StreamCore<G, C> {
     fn connect_loop(&self, handle: Handle<G, C>) {
 
         let mut builder = handle.builder;
@@ -131,7 +131,7 @@ impl<G: Scope, C: Container> ConnectLoop<G, C> for StreamCore<G, C> {
 
 /// A handle used to bind the source of a loop variable.
 #[derive(Debug)]
-pub struct Handle<G: Scope, C: Container> {
+pub struct Handle<G: Scope, C: Container + Data> {
     builder: OperatorBuilder<G>,
     summary: <G::Timestamp as Timestamp>::Summary,
     output: OutputWrapper<G::Timestamp, CapacityContainerBuilder<C>, Tee<G::Timestamp, C>>,
diff --git a/timely/src/dataflow/operators/core/filter.rs b/timely/src/dataflow/operators/core/filter.rs
index c9f5ce305..80ef564b0 100644
--- a/timely/src/dataflow/operators/core/filter.rs
+++ b/timely/src/dataflow/operators/core/filter.rs
@@ -1,5 +1,6 @@
 //! Filters a stream by a predicate.
 use crate::container::{Container, SizableContainer, PushInto};
+use crate::Data;
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::{Scope, StreamCore};
 use crate::dataflow::operators::generic::operator::Operator;
@@ -22,7 +23,7 @@ pub trait Filter<C: Container> {
     fn filter<P: FnMut(&C::Item<'_>)->bool+'static>(&self, predicate: P) -> Self;
 }
 
-impl<G: Scope, C: SizableContainer> Filter<C> for StreamCore<G, C>
+impl<G: Scope, C: SizableContainer + Data> Filter<C> for StreamCore<G, C>
 where
     for<'a> C: PushInto<C::Item<'a>>
 {
diff --git a/timely/src/dataflow/operators/core/input.rs b/timely/src/dataflow/operators/core/input.rs
index 78ee65764..a1af533c2 100644
--- a/timely/src/dataflow/operators/core/input.rs
+++ b/timely/src/dataflow/operators/core/input.rs
@@ -11,7 +11,7 @@ use crate::progress::frontier::Antichain;
 use crate::progress::{Operate, operate::SharedProgress, Timestamp, ChangeBatch};
 use crate::progress::Source;
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::communication::Push;
 use crate::dataflow::{Scope, ScopeParent, StreamCore};
 use crate::dataflow::channels::pushers::{Tee, Counter};
@@ -60,7 +60,7 @@ pub trait Input : Scope {
     /// }
     /// });
     /// ```
-    fn new_input<C: Container>(&mut self) -> (Handle<<Self as ScopeParent>::Timestamp, CapacityContainerBuilder<C>>, StreamCore<Self, C>);
+    fn new_input<C: Container + Data>(&mut self) -> (Handle<<Self as ScopeParent>::Timestamp, CapacityContainerBuilder<C>>, StreamCore<Self, C>);
 
     /// Create a new [StreamCore] and [Handle] through which to supply input.
     ///
@@ -135,7 +135,7 @@ pub trait Input : Scope {
 
 use crate::order::TotalOrder;
 impl<G: Scope> Input for G where <G as ScopeParent>::Timestamp: TotalOrder {
-    fn new_input<C: Container>(&mut self) -> (Handle<<G as ScopeParent>::Timestamp, CapacityContainerBuilder<C>>, StreamCore<G, C>) {
+    fn new_input<C: Container + Data>(&mut self) -> (Handle<<G as ScopeParent>::Timestamp, CapacityContainerBuilder<C>>, StreamCore<G, C>) {
         let mut handle = Handle::new();
         let stream = self.input_from(&mut handle);
         (handle, stream)
@@ -225,7 +225,7 @@ pub struct Handle<T: Timestamp, CB: ContainerBuilder> {
     now_at: T,
 }
 
-impl<T: Timestamp, C: Container> Handle<T, CapacityContainerBuilder<C>> {
+impl<T: Timestamp, C: Container + Data> Handle<T, CapacityContainerBuilder<C>> {
     /// Allocates a new input handle, from which one can create timely streams.
     ///
     /// # Examples
diff --git a/timely/src/dataflow/operators/core/inspect.rs b/timely/src/dataflow/operators/core/inspect.rs
index 4b6fa830a..b8c41f97b 100644
--- a/timely/src/dataflow/operators/core/inspect.rs
+++ b/timely/src/dataflow/operators/core/inspect.rs
@@ -1,6 +1,6 @@
 //! Extension trait and implementation for observing and action on streamed data.
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::{Scope, StreamCore};
 use crate::dataflow::operators::generic::Operator;
@@ -90,7 +90,7 @@ pub trait Inspect<G: Scope, C: Container>: InspectCore<G, C> + Sized {
     fn inspect_core<F>(&self, func: F) -> Self where F: FnMut(Result<(&G::Timestamp, &C), &[G::Timestamp]>)+'static;
 }
 
-impl<G: Scope, C: Container> Inspect<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> Inspect<G, C> for StreamCore<G, C> {
     fn inspect_core<F>(&self, func: F) -> Self where F: FnMut(Result<(&G::Timestamp, &C), &[G::Timestamp]>) + 'static {
         self.inspect_container(func)
     }
@@ -120,7 +120,7 @@ pub trait InspectCore<G: Scope, C: Container> {
     fn inspect_container<F>(&self, func: F) -> StreamCore<G, C> where F: FnMut(Result<(&G::Timestamp, &C), &[G::Timestamp]>)+'static;
 }
 
-impl<G: Scope, C: Container> InspectCore<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> InspectCore<G, C> for StreamCore<G, C> {
 
     fn inspect_container<F>(&self, mut func: F) -> StreamCore<G, C>
         where F: FnMut(Result<(&G::Timestamp, &C), &[G::Timestamp]>)+'static
diff --git a/timely/src/dataflow/operators/core/map.rs b/timely/src/dataflow/operators/core/map.rs
index 6262d2974..8af70e4a4 100644
--- a/timely/src/dataflow/operators/core/map.rs
+++ b/timely/src/dataflow/operators/core/map.rs
@@ -1,6 +1,7 @@
 //! Extension methods for `StreamCore` based on record-by-record transformation.
 
 use crate::container::{Container, SizableContainer, PushInto};
+use crate::Data;
 use crate::dataflow::{Scope, StreamCore};
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::operators::generic::operator::Operator;
@@ -23,7 +24,7 @@ pub trait Map<S: Scope, C: Container> {
     /// ```
     fn map<D2, C2, L>(&self, mut logic: L) -> StreamCore<S, C2>
     where
-        C2: SizableContainer + PushInto<D2>,
+        C2: SizableContainer + PushInto<D2> + Data,
         L: FnMut(C::Item<'_>)->D2 + 'static,
     {
         self.flat_map(move |x| std::iter::once(logic(x)))
@@ -45,19 +46,19 @@ pub trait Map<S: Scope, C: Container> {
     fn flat_map<I, C2, L>(&self, logic: L) -> StreamCore<S, C2>
     where
         I: IntoIterator,
-        C2: SizableContainer + PushInto<I::Item>,
+        C2: SizableContainer + PushInto<I::Item> + Data,
         L: FnMut(C::Item<'_>)->I + 'static,
     ;
 }
 
-impl<S: Scope, C: Container> Map<S, C> for StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> Map<S, C> for StreamCore<S, C> {
     // TODO : This would be more robust if it captured an iterator and then pulled an appropriate
     // TODO : number of elements from the iterator. This would allow iterators that produce many
     // TODO : records without taking arbitrarily long and arbitrarily much memory.
     fn flat_map<I, C2, L>(&self, mut logic: L) -> StreamCore<S, C2>
     where
         I: IntoIterator,
-        C2: SizableContainer + PushInto<I::Item>,
+        C2: SizableContainer + PushInto<I::Item> + Data,
         L: FnMut(C::Item<'_>)->I + 'static,
     {
         self.unary(Pipeline, "FlatMap", move |_,_| move |input, output| {
diff --git a/timely/src/dataflow/operators/core/ok_err.rs b/timely/src/dataflow/operators/core/ok_err.rs
index c3c1fa6b8..fd7887053 100644
--- a/timely/src/dataflow/operators/core/ok_err.rs
+++ b/timely/src/dataflow/operators/core/ok_err.rs
@@ -1,6 +1,7 @@
 //! Operators that separate one stream into two streams based on some condition
 
 use crate::container::{Container, SizableContainer, PushInto};
+use crate::Data;
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::operators::generic::builder_rc::OperatorBuilder;
 use crate::dataflow::{Scope, StreamCore};
@@ -32,20 +33,20 @@ pub trait OkErr<S: Scope, C: Container> {
         logic: L,
     ) -> (StreamCore<S, C1>, StreamCore<S, C2>)
     where
-        C1: SizableContainer + PushInto<D1>,
-        C2: SizableContainer + PushInto<D2>,
+        C1: SizableContainer + PushInto<D1> + Data,
+        C2: SizableContainer + PushInto<D2> + Data,
         L: FnMut(C::Item<'_>) -> Result<D1, D2>+'static
     ;
 }
 
-impl<S: Scope, C: Container> OkErr<S, C> for StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> OkErr<S, C> for StreamCore<S, C> {
     fn ok_err<C1, D1, C2, D2, L>(
         &self,
         mut logic: L,
     ) -> (StreamCore<S, C1>, StreamCore<S, C2>)
     where
-        C1: SizableContainer + PushInto<D1>,
-        C2: SizableContainer + PushInto<D2>,
+        C1: SizableContainer + PushInto<D1> + Data,
+        C2: SizableContainer + PushInto<D2> + Data,
         L: FnMut(C::Item<'_>) -> Result<D1, D2>+'static
     {
         let mut builder = OperatorBuilder::new("OkErr".to_owned(), self.scope());
diff --git a/timely/src/dataflow/operators/core/probe.rs b/timely/src/dataflow/operators/core/probe.rs
index 1c8d9750c..c7fdced33 100644
--- a/timely/src/dataflow/operators/core/probe.rs
+++ b/timely/src/dataflow/operators/core/probe.rs
@@ -13,7 +13,7 @@ use crate::dataflow::operators::generic::builder_raw::OperatorBuilder;
 
 use crate::dataflow::{StreamCore, Scope};
 
-use crate::Container;
+use crate::{Container, Data};
 
 /// Monitors progress at a `Stream`.
 pub trait Probe<G: Scope, C: Container> {
@@ -79,7 +79,7 @@ pub trait Probe<G: Scope, C: Container> {
     fn probe_with(&self, handle: &Handle<G::Timestamp>) -> StreamCore<G, C>;
 }
 
-impl<G: Scope, C: Container> Probe<G, C> for StreamCore<G, C> {
+impl<G: Scope, C: Container + Data> Probe<G, C> for StreamCore<G, C> {
     fn probe(&self) -> Handle<G::Timestamp> {
 
         // the frontier is shared state; scope updates, handle reads.
diff --git a/timely/src/dataflow/operators/core/rc.rs b/timely/src/dataflow/operators/core/rc.rs
index af73befbe..fdc68b9d4 100644
--- a/timely/src/dataflow/operators/core/rc.rs
+++ b/timely/src/dataflow/operators/core/rc.rs
@@ -3,7 +3,7 @@
 use crate::dataflow::channels::pact::Pipeline;
 use crate::dataflow::operators::Operator;
 use crate::dataflow::{Scope, StreamCore};
-use crate::Container;
+use crate::{Container, Data};
 use std::rc::Rc;
 
 /// Convert a stream into a stream of shared containers
@@ -24,7 +24,7 @@ pub trait SharedStream<S: Scope, C: Container> {
     fn shared(&self) -> StreamCore<S, Rc<C>>;
 }
 
-impl<S: Scope, C: Container> SharedStream<S, C> for StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> SharedStream<S, C> for StreamCore<S, C> {
     fn shared(&self) -> StreamCore<S, Rc<C>> {
         self.unary(Pipeline, "Shared", move |_, _| {
             move |input, output| {
diff --git a/timely/src/dataflow/operators/core/reclock.rs b/timely/src/dataflow/operators/core/reclock.rs
index 86ec5ae69..e74bedbc4 100644
--- a/timely/src/dataflow/operators/core/reclock.rs
+++ b/timely/src/dataflow/operators/core/reclock.rs
@@ -1,6 +1,6 @@
 //! Extension methods for `Stream` based on record-by-record transformation.
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::order::PartialOrder;
 use crate::dataflow::{Scope, StreamCore};
 use crate::dataflow::channels::pact::Pipeline;
@@ -45,11 +45,11 @@ pub trait Reclock<S: Scope, C: Container> {
     /// assert_eq!(extracted[1], (5, vec![4,5]));
     /// assert_eq!(extracted[2], (8, vec![6,7,8]));
     /// ```
-    fn reclock<TC: Container>(&self, clock: &StreamCore<S, TC>) -> Self;
+    fn reclock<TC: Container + Data>(&self, clock: &StreamCore<S, TC>) -> Self;
 }
 
-impl<S: Scope, C: Container> Reclock<S, C> for StreamCore<S, C> {
-    fn reclock<TC: Container>(&self, clock: &StreamCore<S, TC>) -> StreamCore<S, C> {
+impl<S: Scope, C: Container + Data> Reclock<S, C> for StreamCore<S, C> {
+    fn reclock<TC: Container + Data>(&self, clock: &StreamCore<S, TC>) -> StreamCore<S, C> {
 
         let mut stash = vec![];
 
diff --git a/timely/src/dataflow/operators/core/to_stream.rs b/timely/src/dataflow/operators/core/to_stream.rs
index 69b270f59..cfc6f429b 100644
--- a/timely/src/dataflow/operators/core/to_stream.rs
+++ b/timely/src/dataflow/operators/core/to_stream.rs
@@ -1,7 +1,7 @@
 //! Conversion to the `StreamCore` type from iterators.
 
 use crate::container::{CapacityContainerBuilder, ContainerBuilder, SizableContainer, PushInto};
-use crate::Container;
+use crate::{Container, Data};
 use crate::dataflow::operators::generic::operator::source;
 use crate::dataflow::{StreamCore, Scope};
 
@@ -81,7 +81,7 @@ pub trait ToStream<C: Container> {
     fn to_stream<S: Scope>(self, scope: &mut S) -> StreamCore<S, C>;
 }
 
-impl<C: SizableContainer, I: IntoIterator+'static> ToStream<C> for I where C: PushInto<I::Item> {
+impl<C: SizableContainer + Data, I: IntoIterator+'static> ToStream<C> for I where C: PushInto<I::Item> {
     fn to_stream<S: Scope>(self, scope: &mut S) -> StreamCore<S, C> {
         ToStreamBuilder::<CapacityContainerBuilder<C>>::to_stream_with_builder(self, scope)
     }
diff --git a/timely/src/dataflow/operators/core/unordered_input.rs b/timely/src/dataflow/operators/core/unordered_input.rs
index 48eb32677..aacd89ca9 100644
--- a/timely/src/dataflow/operators/core/unordered_input.rs
+++ b/timely/src/dataflow/operators/core/unordered_input.rs
@@ -2,7 +2,7 @@
 use std::rc::Rc;
 use std::cell::RefCell;
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::container::{ContainerBuilder, CapacityContainerBuilder};
 
 use crate::scheduling::{Schedule, ActivateOnDrop};
@@ -165,7 +165,7 @@ impl<T: Timestamp, CB: ContainerBuilder> UnorderedHandle<T, CB> {
     }
 }
 
-impl<T: Timestamp, C: Container> UnorderedHandle<T, CapacityContainerBuilder<C>> {
+impl<T: Timestamp, C: Container + Data> UnorderedHandle<T, CapacityContainerBuilder<C>> {
     /// Allocates a new automatically flushing session based on the supplied capability.
     #[inline]
     pub fn session(&mut self, cap: ActivateCapability<T>) -> ActivateOnDrop<Session<T, CapacityContainerBuilder<C>, Counter<T, C, Tee<T, C>>>> {
diff --git a/timely/src/dataflow/operators/generic/handles.rs b/timely/src/dataflow/operators/generic/handles.rs
index 1eb0cec07..47811a659 100644
--- a/timely/src/dataflow/operators/generic/handles.rs
+++ b/timely/src/dataflow/operators/generic/handles.rs
@@ -15,7 +15,7 @@ use crate::dataflow::channels::pushers::Counter as PushCounter;
 use crate::dataflow::channels::pushers::buffer::{Buffer, Session};
 use crate::dataflow::channels::Message;
 use crate::communication::{Push, Pull};
-use crate::Container;
+use crate::{Container, Data};
 use crate::container::{ContainerBuilder, CapacityContainerBuilder};
 use crate::logging::TimelyLogger as Logger;
 
@@ -235,7 +235,7 @@ impl<'a, T: Timestamp, CB: ContainerBuilder, P: Push<Message<T, CB::Container>>>
     }
 }
 
-impl<'a, T: Timestamp, C: Container, P: Push<Message<T, C>>> OutputHandleCore<'a, T, CapacityContainerBuilder<C>, P> {
+impl<'a, T: Timestamp, C: Container + Data, P: Push<Message<T, C>>> OutputHandleCore<'a, T, CapacityContainerBuilder<C>, P> {
     /// Obtains a session that can send data at the timestamp associated with capability `cap`.
     ///
     /// In order to send data at a future timestamp, obtain a capability for the new timestamp
diff --git a/timely/src/dataflow/operators/generic/operator.rs b/timely/src/dataflow/operators/generic/operator.rs
index e28381dfb..247cb5258 100644
--- a/timely/src/dataflow/operators/generic/operator.rs
+++ b/timely/src/dataflow/operators/generic/operator.rs
@@ -12,7 +12,7 @@ use crate::dataflow::{Scope, StreamCore};
 use super::builder_rc::OperatorBuilder;
 use crate::dataflow::operators::generic::OperatorInfo;
 use crate::dataflow::operators::generic::notificator::{Notificator, FrontierNotificator};
 
-use crate::Container;
+use crate::{Container, Data};
 use crate::container::{ContainerBuilder, CapacityContainerBuilder};
 
 /// Methods to construct generic streaming and blocking operators.
@@ -181,7 +181,7 @@ pub trait Operator<G: Scope, C1: Container> {
     /// ```
     fn binary_frontier<C2, CB, B, L, P1, P2>(&self, other: &StreamCore<G, C2>, pact1: P1, pact2: P2, name: &str, constructor: B) -> StreamCore<G, CB::Container>
     where
-        C2: Container,
+        C2: Container + Data,
         CB: ContainerBuilder,
         B: FnOnce(Capability<G::Timestamp>, OperatorInfo) -> L,
         L: FnMut(&mut FrontieredInputHandleCore<G::Timestamp, C1, P1::Puller>,
@@ -231,7 +231,7 @@ pub trait Operator<G: Scope, C1: Container> {
     /// }
     /// }).unwrap();
     /// ```
-    fn binary_notify<C2: Container,
+    fn binary_notify<C2: Container + Data,
             L: FnMut(&mut InputHandleCore<G::Timestamp, C1, P1::Puller>,
                      &mut InputHandleCore<G::Timestamp, C2, P2::Puller>,
                      &mut OutputHandleCore<G::Timestamp, CapacityContainerBuilder<C1>, Tee<G::Timestamp, C1>>,
@@ -273,7 +273,7 @@ pub trait Operator<G: Scope, C1: Container> {
     /// ```
     fn binary<C2, CB, B, L, P1, P2>(&self, other: &StreamCore<G, C2>, pact1: P1, pact2: P2, name: &str, constructor: B) -> StreamCore<G, CB::Container>
     where
-        C2: Container,
+        C2: Container + Data,
         CB: ContainerBuilder,
         B: FnOnce(Capability<G::Timestamp>, OperatorInfo) -> L,
         L: FnMut(&mut InputHandleCore<G::Timestamp, C1, P1::Puller>,
@@ -311,7 +311,7 @@ pub trait Operator<G: Scope, C1: Container> {
         P: ParallelizationContract<G::Timestamp, C1>;
 }
 
-impl<G: Scope, C1: Container> Operator<G, C1> for StreamCore<G, C1> {
+impl<G: Scope, C1: Container + Data> Operator<G, C1> for StreamCore<G, C1> {
 
     fn unary_frontier<CB, B, L, P>(&self, pact: P, name: &str, constructor: B) -> StreamCore<G, CB::Container>
     where
@@ -393,7 +393,7 @@ impl<G: Scope, C1: Container> Operator<G, C1> for StreamCore<G, C1> {
     fn binary_frontier<C2, CB, B, L, P1, P2>(&self, other: &StreamCore<G, C2>, pact1: P1, pact2: P2, name: &str, constructor: B) -> StreamCore<G, CB::Container>
     where
-        C2: Container,
+        C2: Container + Data,
         CB: ContainerBuilder,
         B: FnOnce(Capability<G::Timestamp>, OperatorInfo) -> L,
         L: FnMut(&mut FrontieredInputHandleCore<G::Timestamp, C1, P1::Puller>,
@@ -424,7 +424,7 @@ impl<G: Scope, C1: Container> Operator<G, C1> for StreamCore<G, C1> {
         stream
     }
 
-    fn binary_notify<C2: Container,
+    fn binary_notify<C2: Container + Data,
             L: FnMut(&mut InputHandleCore<G::Timestamp, C1, P1::Puller>,
                      &mut InputHandleCore<G::Timestamp, C2, P2::Puller>,
@@ -453,7 +453,7 @@ impl<G: Scope, C1: Container> Operator<G, C1> for StreamCore<G, C1> {
     fn binary<C2, CB, B, L, P1, P2>(&self, other: &StreamCore<G, C2>, pact1: P1, pact2: P2, name: &str, constructor: B) -> StreamCore<G, CB::Container>
     where
-        C2: Container,
+        C2: Container + Data,
         CB: ContainerBuilder,
         B: FnOnce(Capability<G::Timestamp>, OperatorInfo) -> L,
         L: FnMut(&mut InputHandleCore<G::Timestamp, C1, P1::Puller>,
@@ -586,7 +586,7 @@
 ///
 /// });
 /// ```
-pub fn empty<G: Scope, C: Container>(scope: &G) -> StreamCore<G, C> {
+pub fn empty<G: Scope, C: Container + Data>(scope: &G) -> StreamCore<G, C> {
     source::<_, CapacityContainerBuilder<C>, _, _>(scope, "Empty", |_capability, _info| |_output| {
         // drop capability, do nothing
     })
diff --git a/timely/src/dataflow/stream.rs b/timely/src/dataflow/stream.rs
index 92e0f19a1..d071cac45 100644
--- a/timely/src/dataflow/stream.rs
+++ b/timely/src/dataflow/stream.rs
@@ -19,7 +19,6 @@ use crate::Container;
 ///
 /// Internally `Stream` maintains a list of data recipients who should be presented with data
 /// produced by the source of the stream.
-#[derive(Clone)]
 pub struct StreamCore<S: Scope, C> {
     /// The progress identifier of the stream's data source.
     name: Source,
@@ -29,6 +28,22 @@ pub struct StreamCore<S: Scope, C> {
     ports: TeeHelper<S::Timestamp, C>,
 }
 
+impl<S: Scope, C> Clone for StreamCore<S, C> {
+    fn clone(&self) -> Self {
+        Self {
+            name: self.name.clone(),
+            scope: self.scope.clone(),
+            ports: self.ports.clone(),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.name.clone_from(&source.name);
+        self.scope.clone_from(&source.scope);
+        self.ports.clone_from(&source.ports);
+    }
+}
+
 /// A stream batching data in vectors.
 pub type Stream<S, D> = StreamCore<S, Vec<D>>;
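
Note on the `where Self: 'a` additions: once `Container` no longer carries a `'static` supertrait, its borrowing generic associated types must state explicitly that `Self` outlives the borrow. The following is a minimal, standalone sketch of that pattern — it is not timely's actual API, and the names `Items` and `Numbers` are illustrative only.

```rust
// Sketch: a trait with a borrowing GAT, analogous to `Container::Iter<'a>`.
// Without a `'static` supertrait, the GAT needs `where Self: 'a` because
// `iter` hands out an iterator that borrows `self`.
trait Items: Default {
    type Iter<'a>: Iterator<Item = &'a u64> where Self: 'a;
    fn iter(&self) -> Self::Iter<'_>;
}

#[derive(Default)]
struct Numbers(Vec<u64>);

impl Items for Numbers {
    // Mirrors the diff's `type Iter<'a> = std::slice::Iter<'a, T> where Self: 'a;`.
    type Iter<'a> = std::slice::Iter<'a, u64> where Self: 'a;
    fn iter(&self) -> Self::Iter<'_> {
        self.0.iter()
    }
}

fn main() {
    let numbers = Numbers(vec![1, 2, 3]);
    // Reading through the borrowing iterator does not require `Numbers: Clone`.
    assert_eq!(numbers.iter().sum::<u64>(), 6);
}
```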
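Note on the manual `Clone` for `StreamCore`: a derived `Clone` would add a `C: Clone` bound on the container parameter even though no owned `C` value needs cloning, which would defeat the point of relaxing `Container`. The sketch below illustrates that general Rust behavior with invented names (`MyStream`, `NoClone`); it is not the PR's code, only the rationale behind replacing `#[derive(Clone)]` with a hand-written impl that also provides an allocation-reusing `clone_from`.

```rust
use std::marker::PhantomData;

// `C` only appears in a marker position here; the real `StreamCore` stores it
// inside an `Rc`-backed field that is `Clone` regardless of `C`.
struct MyStream<S: Clone, C> {
    scope: S,
    marker: PhantomData<C>,
}

// A manual impl avoids the `C: Clone` bound that `#[derive(Clone)]` would add.
impl<S: Clone, C> Clone for MyStream<S, C> {
    fn clone(&self) -> Self {
        Self { scope: self.scope.clone(), marker: PhantomData }
    }

    // Mirrors the diff's `clone_from`: clone field-by-field into existing storage.
    fn clone_from(&mut self, source: &Self) {
        self.scope.clone_from(&source.scope);
    }
}

fn main() {
    struct NoClone; // stands in for a container type that is not `Clone`
    let stream = MyStream::<String, NoClone> { scope: "scope".to_string(), marker: PhantomData };
    let copy = stream.clone(); // compiles even though `NoClone: Clone` does not hold
    assert_eq!(copy.scope, "scope");
}
```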