abstract class Indexed extends Stream[Short] with Stream.Interface.Indexed[Short]
- Alphabetic
- Indexed
- Stream.Interface.Indexed
- Stream.Interface.Preview
- Stream.A.Basic.Defaults
- Stream
- Stream._info
- Stream._iterate
- Stream._consume
- Stream._consume._foreach
- Stream._consume._evaluate
- Stream._consume._convert
- Stream._consume._aggregate
- Stream._extend
- Stream._extend._trigger
- Stream._extend._zip
- Stream._extend._peek
- Stream._extend._order
- Stream._extend._map
- Stream._extend._group
- Stream._extend._flow
- Stream._extend._filter
- Stream._extend._add
- Stream.Flow
- Stream.Flow._info
- Stream.Flow._consume
- Stream.Flow._consume._foreach
- Stream.Flow._consume._evaluate
- Stream.Flow._consume._convert
- Stream.Flow._consume._aggregate
- Stream.Flow._extend
- Stream.Flow._extend._peek
- Stream.Flow._extend._map
- Stream.Flow._extend._flow
- Stream.Flow._extend._filter
- Stream.Flow._extend.Z.Shared
- Stream.Interface
- scala.AnyRef
- scala.Any
- Hide All
- Show All
- Public
- All
Constructor
-
new
Indexed()
- Attributes
- protected
- Definition Classes
- Indexed
Constant
-
val
_position: Int
- Attributes
- protected[scalqa.Stream]
- Definition Classes
- Indexed
Method
-
def
append(element: Short): ~[Short]
Append
-
def
appendAll(that: ~[Short]): ~[Short]
Append all
-
def
apply(c: Consumer[Short], forEmpty: ⇒ Any = ()): Unit
For each or for empty
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
average(implicit n: Numeric[Short]): Short
Average
Average
Computes average
(10 to 15).all.map(_.toFloat).average // Returns 12.5
- Definition Classes
- _Class → _aggregate
-
def
averageFew[B](f: Mapping[Short, B]*)(implicit arg0: Numeric[B]): Seq[B] with Util.Able.Void
Multi average
Multi average
Simultaneously computes multiple average values for properties specified by several functions
Returns Seq with values corresponding to the given mappings
For empty pipelines returned Seq will still hold zero numerics, but will test isVoid==true
(1 to 1000).all.averageFew(v => v, _ * 10, _ * 100).all.lp // Prints ~(500, 5005, 50050)
- Definition Classes
- _Class → _aggregate
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
def
collect[B](f: PartialFunction[Short, B])(implicit arg0: Tag[B]): Stream[B]
Filter and converter
Filter and converter
Only lets through elements for which the given PartialFunction is defined
The elements are converted to returned type
def all = ~~[Any] + "ABC" + 1 + 22.0 + "DE" + 333F + "F" all.lp // Prints ~(ABC, 1, 22.0, DE, 333.0, F) all.collect{ case s: String if (s.length > 1) => s }.lp // Prints ~(ABC, DE)
Note. collect always requires double evaluation for each element, so letMap is preferred in many cases
-
def
collectOpt[B](f: PartialFunction[Short, B]): Opt[B]
Find first matching option
Find first matching option
The matching element is mapped with the functions
def stream = ~~[Any] + 1 + "ABC" + 22.0 + "DE" + 333F + "F" // Find length of the first matching string stream.collectOpt{ case s: String if (s.length > 1) => s.length }.lp // Prints: Opt(3)
-
def
contains[B >: Short](value: B): Boolean
Includes check
-
def
copyTo(b: Idx.Buffer.Loader[Short]): Unit
Copy to buffer
Copy to buffer
This is potentially the most efficient way to get all Stream elements
Idx.Buffer.Loader provides a trustless way to copy arrays in bulk, so many array based Streams can take advantage of this
-
def
count(f: Filter[Short]): Int
Element conditional count
-
def
count: Int
All elements count
-
def
countAndSeconds: (Int, Double)
Element count and time
Element count and time
Returns total count and time in seconds it took to pump the pipeline from first to last elements
val (count, seconds) = (1 to 1000).all.peek(_ => Thread.sleep(1)).countAndSeconds println("Count = " + count + ", done in " + seconds + " secs") // Output Count = 1000, done in 1.004261423 secs
-
def
countFew(f: Filter[Short]*): Seq[Int]
Element multi count
Element multi count
Counts elements for several filters at once
Returns Seq, where each Int corresponds to the given filter index
val Seq(total, odd, even) = (1 to 50).all.countFew(_ => true, _ % 2 == 1, _ % 2 == 0) println("total=" + total + ", odd=" + odd + ", even=" + even) // Output total=50, odd=25, even=25
- f
several filters
-
def
default(element: ⇒ Short): Stream[Short]
Default element
-
def
drain: Unit
Discharge everything
Discharge everything
Calls foreach, discarding all retrieved elements
Even though nothing is done at this point, this method can be run for the benefit of other functions in the pipeline
('A' to 'C').all.peek(v => println("Process " + v)).drain // Output Process A Process B Process C
-
def
drop(f: Filter[Short]): Stream[Short]
Reversed filter
-
def
dropAll(a: ~[Short])(implicit o: Ordering[Short] = null): Stream[Short]
Group reversed filter
Group reversed filter
Discards all elements equal to the found in that stream
val idx = (1 to 5).all.flatMap(i => Seq(i, i, i)).to[Idx] idx.all.lp // Prints: ~(1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5) idx.all.dropAll(3 to 7).lp // Prints: ~(1, 1, 1, 2, 2, 2) idx.all.dropAll(List(2, 5, 11)).lp // Prints: ~(1, 1, 1, 3, 3, 3, 4, 4, 4)
Note. The operation is very efficient if streams are sorted
-
def
dropAllBy[B](f: Mapping[Short, B], a: ~[B])(implicit o: Ordering[B] = null): Stream[Short]
Property group reversed filter
-
def
dropEverythingIf(b: Boolean): Stream[Short]
Discarding everything
Discarding everything
With a single test, lets you drop the entire pipeline in favor of the void instance
Note: This can also be done with 'if-else' outside the pipeline, however it proved to be useful with really long statements
(1 to 10).all.dropEverythingIf(true).lp // Prints ~()
-
def
dropLast(n: Int): Stream[Short]
Sequence tail reversed filter
-
def
dropNext(n: Int): Stream[Short]
Sequence head reversed filter
-
def
dropRange(r: Range): ~[Short]
Sequence range reversed filter
-
def
dropWhile(f: Filter[Short]): Stream[Short]
Sequence head reversed filter
Sequence head reversed filter
Discards first consecutive elements satisfying the filter
Note, everything starting from the first non compliant element will be allowed (including later compliant elements)
def all = (1 to 5).all +~ (1 to 5) all.lp // Prints ~(1, 2, 3, 4, 5, 1, 2, 3, 4, 5) all.dropWhile(_ <= 3).lp // Prints ~(4, 5, 1, 2, 3, 4, 5)
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
filter(f: Filter[Short]): Stream[Short]
Same as let
-
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
find(f: Filter[Short]): Short
Find
- def findAnyOpt: Opt[Short]
-
def
findIdxOpt(f: Filter[Short]): Opt.Int
Find index
-
def
findNextOpt(f: Filter[Short]): Opt[Short]
Find next element
-
def
findOpt(f: Filter[Short]): Opt[Short]
Find
-
def
flatMap[B](f: Mapping[Short, ~[B]])(implicit i: Tag[B]): Stream[B]
Map multiplier
Map multiplier
For every existing element, a mapped stream of elements is inserted into the pipeline
Note. The mapping can return an empty stream, in which case total number of elements might even be reduced
~~(1, 2, 3).flatMap(i => Seq(i * 10, i * 100, i * 1000)).lp // Output ~(10, 100, 1000, 20, 200, 2000, 30, 300, 3000)
- f
function to provide a stream of elements for each existing element
-
def
flatten[B](implicit f: Mapping[Short, ~[B]], i: Tag[B]): Stream[B]
Converts a stream of streams into a flat stream
-
def
fold(start: Short)(op: Folding[Short]): Short
Basic fold
Basic fold
Folds elements with a binary function
Returns
start
value for empty pipeline// Multiply every element by next (1 to 10).all.fold(1)(_ * _) // Returns 3628800
- start
seed value to start with
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
foldAs[B](start: B)(op: Folding.As[B, Short])(implicit i: Tag[B]): B
Fold and convert
Fold and convert
Folds and converts elements with a binary function
// Calculate sum of first 1000 Ints (1 to 1000).all.foldAs[Long](0L)(_ + _) // Returns 500500
- start
seed value to start with
- Definition Classes
- _Class → _aggregate
-
def
foldFlowAs[B](start: B)(op: Folding.As[B, Short], cf: Folding[B] = null)(implicit arg0: Tag[B]): B
foldAs
- start
seed value to start with
- cf
collect function to put together results of parallel computations. It is not required and ignored for Stream
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
foreach(f: Consumer[Short]): Unit
Foreach override
Foreach override
foreach is the most widely used Stream method, which largely determines general performance
Default implementation looks like
def foreach(c: Stream.Consumer[A]) = while (prime) c.accept(pump)
If custom Stream can do better, this method should be overridden
-
def
foreachIdx(f: Consumer.Idx[Short], start: Int = 0): Unit
For each indexed
-
def
format(s: Opt[String] = \/, pb: Opt[String] = \/, pa: Opt[String] = \/, c: Opt[(Short) ⇒ String] = \/): String
Elements as String
Elements as String
All elements are converted toString
The results are concatenated with possible use of padding and separator
('a' to 'j').all.format() // Returns abcdefghij ('a' to 'j').all.format("|") // Returns a|b|c|d|e|f|g|h|i|j ('a' to 'j').all.format(",", "[", "]") // Returns [a],[b],[c],[d],[e],[f],[g],[h],[i],[j]
-
def
group(test: (Short, Short) ⇒ Boolean, peekSplit: (Short, Boolean) ⇒ Any = (_, _) => ()): Stream[~[Short]]
Group by test
Group by test
Puts elements in the same group based on a function test for every two consecutive elements
// Putting Ints into groups of 3 (0 to 20).all.group(_ / 3 == _ / 3).tp // Output --------------- ? --------------- ~(0, 1, 2) ~(3, 4, 5) ~(6, 7, 8) ~(9, 10, 11) ~(12, 13, 14) ~(15, 16, 17) ~(18, 19, 20) ---------------
- test
function for two consecutive elements. if 'false' is returned, the second tested element will start a new group
- peekSplit
function to run for each piped element. Boolean parameter indicates if the element starts a new group
-
def
group: Stream[~[Short]]
Simple grouping
Simple grouping
Puts consecutive elements in the same group if they are
equal
Note: Non consecutive equal elements will end up in different groups. Prior ordering might be needed
def all = ~~(1, 2, 3).flatMap(i => Seq(i, i, i)) all.lp // Prints ~(1, 1, 1, 2, 2, 2, 3, 3, 3) all.group.tp // Prints ------------ ? ------------ ~(1, 1, 1) ~(2, 2, 2) ~(3, 3, 3) ------------
-
def
groupBy(properties: Mapping[Short, Any]*): Stream[~[Short]]
Grouping on properties
Grouping on properties
Puts consecutive elements in the same group if all the specified properties are equal
When properties change, a new group is started
('#' to '|').all.groupBy(_.isLetter, _.isDigit).tp // Output --------------------------------------------------------------------------------- ? --------------------------------------------------------------------------------- ~(#, $, %, &, ', (, ), *, +, ,, -, ., /) ~(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) ~(:, ;, <, =, >, ?, @) ~(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z) ~([, \, ], ^, _, `) ~(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z) ~({, |) ---------------------------------------------------------------------------------
- properties
a set of functions, each indicating an element property
-
def
groupBySize(size: Int): Stream[~[Short]]
Fixed size groups
Fixed size groups
Puts consecutive elements into fixed size groups
('a' to 'z').all.groupBySize(8).tp // Output ------------------------- ? ------------------------- ~(a, b, c, d, e, f, g, h) ~(i, j, k, l, m, n, o, p) ~(q, r, s, t, u, v, w, x) ~(y, z) -------------------------
- size
of groups. Cannot be less than 1.
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
-
def
insert(index: Int, element: Short): Stream[Short]
Insert at
-
def
insertAll(index: Int, that: ~[Short]): Stream[Short]
Insert stream at
-
def
isAny(f: Filter[Short]): Boolean
Any check
-
def
isEvery(f: Filter[Short]): Boolean
Every check
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
isParallel: Boolean
Parallel check
-
def
last: Short
Last element
-
def
lastOpt: Opt[Short]
Last element
-
def
let(f: Filter[Short]): Stream[Short]
Main filter
-
def
letAll(that: ~[Short])(implicit o: Ordering[Short] = null): Stream[Short]
Group filter
Group filter
Only lets elements equal to the found in that stream
val idx = (1 to 5).all.flatMap(i => Seq(i,i,i)).to[Idx] idx.all.lp // Prints: ~(1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5) idx.all.letAll(3 to 7).lp // Prints: ~(3, 3, 3, 4, 4, 4, 5, 5, 5) idx.all.letAll(List(2, 5, 11)).lp // Prints: ~(2, 2, 2, 5, 5, 5)
Note. The operation is very efficient if streams are sorted
-
def
letAllBy[B](f: Mapping[Short, B], that: ~[B])(implicit o: Ordering[B] = null): Stream[Short]
Property group filter
-
def
letIdx(f: Filter.Idx[Short], start: Int = 0): Stream[Short]
Indexed filter
-
def
letLast(n: Int): Stream[Short]
Sequence tail filter
-
def
letMap[B](f: Mapping[Short, Opt[B]])(implicit i: Tag[B]): Stream[B]
Filter and converter
Filter and converter
Only lets elements for which given function returns non empty Opt
The elements are converted to the new type
def all = "ABC" ~+ "1" + "22" + "D" + "333" + "E" all.letMap(v => if (v.length < 2) \/ else v).lp // Prints: ~(ABC, 22, 333) all.letMap({ case s if (s.length >= 2) => s case _ => \/ }).lp // Prints: ~(ABC, 22, 333)
Note: letMap is often a faster alternative to collect with PartialFunction, because it is evaluated just once for each element
-
def
letNext(n: Int): Stream[Short]
Sequence head filter
-
def
letRange(r: Range): ~[Short]
Sequence range filter
-
def
letType[B](implicit t: ClassTag[B]): Stream[B]
Filter and type converter
Filter and type converter
Only lets elements, which are instances of the given type
Note, the result is mapped to the specified type
def all = ~~[Any] + "1" + 2 + 3.0 + 4l + "5" all.lp // Prints ~(1, 2, 3.0, 4, 5) all.letType[String].lp // Prints ~(1, 5)
-
def
letWhile(f: Filter[Short]): Stream[Short]
Sequence head filter
Sequence head filter
Only lets first consecutive elements satisfying the filter
Note, everything starting from the first non compliant element will be discarded (including later compliant elements)
def all = (1 to 5).all +~ (1 to 5) all.lp // Prints ~(1, 2, 3, 4, 5, 1, 2, 3, 4, 5) all.letWhile(_ <= 3).lp // Prints ~(1, 2, 3)
-
def
lp: Unit
Line print
Line print
Is equivalent to:
println(this)
For example:
(1 to 10).all.lp // Prints ~(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
- Definition Classes
- _convert
-
def
map[B](f: Mapping[Short, B])(implicit i: Tag[B]): Stream[B]
Element conversion
-
def
mapCast[B]: Stream[B]
Element cast
-
def
mapIdx[B](f: Mapping.Idx[Short, B], start: Int = 0)(implicit arg0: Tag[B]): Stream[B]
Indexed element conversion
Indexed element conversion
Converts every element in the pipeline with given function
('A' to 'G').all.mapIdx(_ + "=" + _, 1).lp // Prints ~(1=A, 2=B, 3=C, 4=D, 5=E, 6=F, 7=G)
- f
the conversion function which also accepts element index in the sequence
- start
the starting value of indexing
-
def
max(implicit c: Ordering[Short]): Short
Largest
Largest
Selects maximum element, based on the Ordering
Fails for empty stream
~~(4, 3, 12, 7).max // Returns 12
- Definition Classes
- _Class → _aggregate
-
def
maxBy[B](f: Mapping[Short, B])(implicit arg0: Ordering[B]): Short
Largest by property
Largest by property
Selects maximum element, based on mapping
Fails for empty stream
~~("AA", "B", "CCCC", "DDD").maxBy(_.length) // Returns CCCC
- Definition Classes
- _Class → _aggregate
-
def
maxByOpt[B](f: Mapping[Short, B])(implicit arg0: Ordering[B]): Opt[Short]
Largest by property option
Largest by property option
Selects maximum element, based on mapping
~~("AA", "B", "CCCC", "DDD").maxByOpt(_.length).lp // Returns Opt(CCCC)
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
maxOpt(implicit o: Ordering[Short]): Opt[Short]
Largest
Largest
Selects maximum element, based on the Ordering
~~(4, 3, 12, 7).maxOpt // Returns Opt(12)
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
min(implicit c: Ordering[Short]): Short
Smallest
Smallest
Selects minimal element, based on the Ordering
Fails for empty stream
~~(4, 3, 12, 7).min // Returns 3
- Definition Classes
- _Class → _aggregate
-
def
minBy[B](f: Mapping[Short, B])(implicit arg0: Ordering[B]): Short
Smallest by property
Smallest by property
Selects minimal element, based on mapping
Fails for empty stream
~~("AA", "B", "CCCC", "DDD").minBy(_.length) // Returns B
- Definition Classes
- _Class → _aggregate
-
def
minByOpt[B](f: Mapping[Short, B])(implicit arg0: Ordering[B]): Opt[Short]
Smallest by property option
Smallest by property option
Selects minimal element, based on mapping
~~("AA", "B", "CCCC", "DDD").minByOpt(_.length) // Returns Opt(B)
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
minOpt(implicit o: Ordering[Short]): Opt[Short]
Smallest
Smallest
Selects minimal element, based on the Ordering
~~(4, 3, 12, 7).minOpt // Returns Opt(3)
- Definition Classes
- _Class → _aggregate → _aggregate
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
next: Short
Next element
-
def
nextOpt: Opt[Short]
Next element
-
def
noindex: Stream[Short]
Loose indexing optimizations
Loose indexing optimizations
Many streams from indexed sources (like IndexedSeq, Array, Vector, etc) are special. They know their size and can read elements without iteration. They can optimize operations like take, dropNext, letLast, and many others
noindex
turns these privileged Streams into regular ones and is needed occasionally for debugging and testing -
def
nosize: Stream[Short]
Loose size information
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
-
def
parallel: Stream.Flow[Short]
Parallel
Parallel
Returns Stream.Flow with parallel execution
Each consecutive element will be sent to a new thread for processing
(1 to 5).all.parallel.map("Value: " + _ + "\t" + Thread.currentThread.getName).peek(println).drain // Output Value: 1 ForkJoinPool.commonPool-worker-9 Value: 3 ForkJoinPool.commonPool-worker-11 Value: 2 main Value: 4 ForkJoinPool.commonPool-worker-2 Value: 5 ForkJoinPool.commonPool-worker-4
-
def
parallelIf(boolean: Boolean): Stream.Flow[Short]
Conditionally parallel
Conditionally parallel
Switches to parallel execution if boolean parameter == true
Returns Stream.Flow, which could be implemented as sequential Stream or parallel Stream.Flow
(1 to 50).all.parallelIf(true).isParallel // Returns true (1 to 50).all.parallelIf(false).isParallel // Returns false
-
def
parallelIfOver(threshold: Int): Stream.Flow[Short]
Conditionally parallel
Conditionally parallel
Switches to parallel execution if number of elements exceeds threshold
Returns Stream.Flow, which could be implemented as sequential Stream or parallel Stream.Flow
(1 to 50).all.parallelIfOver(100).isParallel // Returns false (1 to 200).all.parallelIfOver(100).isParallel // Returns true
-
def
partition(groupFilters: Filter[Short]*): Stream[~[Short]]
Multi-filter grouping
Multi-filter grouping
All stream elements are grouped by specified filters
Filters are applied in sequence, thus if an element is accepted into a group, it will not be evaluated by the rest of the filters
If there are elements, which are left without a group, one extra group is created
// Age groups (1 to 80).all.partition(_ <= 12, 13 to 19, _ < 30, 30 to 40, _ < 50, _ < 65).tp // Output ------------------------------------------------------------------- ? ------------------------------------------------------------------- ~(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) ~(13, 14, 15, 16, 17, 18, 19) ~(20, 21, 22, 23, 24, 25, 26, 27, 28, 29) ~(30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40) ~(41, 42, 43, 44, 45, 46, 47, 48, 49) ~(50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64) ~(65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80) -------------------------------------------------------------------
- groupFilters
a set of filters, to specify groups
-
def
peek(f: Consumer[Short]): Stream[Short]
Element access
-
def
peekIdx(f: Consumer.Idx[Short], start: Int = 0): Stream[Short]
Indexed element access
Indexed element access
Provides access to passing pipeline elements with their index in sequence
('A' to 'C').all.peekIdx((i, v) => println("Peek " + i + " = " + v), 1).count // Returns 3 // Output Peek 1 = A Peek 2 = B Peek 3 = C
- start
starting value for element indexing
-
def
preview(cnt: Int): ~[Short]
Preview multiple elements
-
def
preview: Stream[Short] with Stream.Interface.Preview[Short]
Adds preview capabilities
Adds preview capabilities
Returns Interface.Preview, which allows to pre-load and inspect elements, even before they go through Stream
-
def
previewNextOpt: Opt[Short]
Preview next element
-
def
previewSize: Int
Preview element count
-
def
previewSizeAtLeast(cnt: Int): Boolean
Checks size up to a limit
-
def
prime(cnt: Int): Int
Requests to get several elements ready
-
def
prime: Boolean
- Definition Classes
- Indexed
-
def
primeAll: Int
Requests to get all elements ready
-
def
pump: Short
Pump next element
-
def
range(implicit c: Ordering[Short]): Range[Short]
Element Range
Element Range
Selects min and max elements (according to the Ordering ), and returns result as Util.Range
Note. Range is void for empty pipelines.
(~~[Int] + 4 + 1 + 12 + 7).range // Returns range from 1 to 12
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
reduce(op: Folding[Short]): Short
Reduces elements with a Folding function
Reduces elements with a Folding function
Will fail for empty Stream
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
reduceOpt(op: Folding[Short]): Opt[Short]
Reduces elements with a Folding function
Reduces elements with a Folding function
Returns Opt.Void for empty Stream
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
reverse: Stream[Short]
Reverse order
-
def
reverseSized(size: Int): Stream[Short]
Reverse order in segments
Reverse order in segments
Reverses order of elements within segments of fixed size
Use Case: Predefined Shuffle
For testing it is often needed to get elements in random order. However it cannot be completely random, if we want to replicate the bug
reverseSized can shuffle elements in a predefined order, given same group size
(1 to 15).all.reverseSized(5).lp // Prints ~(5, 4, 3, 2, 1, 10, 9, 8, 7, 6, 15, 14, 13, 12, 11)
-
def
sequential: Stream[Short]
Restores potentially parallel Flow back to Stream
If this is already a Stream, the operation is instant, returning this
Otherwise the operation is quite expensive
In many cases it is advisable to consume pipeline as Flow instead of converting to Stream
val (count, millis) = (1 to 1000).all .parallel // Switching to parallel Stream.Flow .peek(_ => Thread.sleep(1)) // Expensive operation .sequential // Back to Stream .countAndMillis println("Count = " + count + ", done in " + millis / 1000F + " secs") // Output Count = 1000, done in 0.224 secs // Note: We have 1000 elements each pausing for 1 millis. // Without parallel processing total time would be over 1 second
-
def
shuffle: Stream[Short]
Randomize order
-
def
sizeOpt: Opt.Int
Current size option
-
def
sliding(size: Int, step: Int = 1): Stream[~[Short]]
Sliding group view
Sliding group view
Example: group size 3 with step 1
('a' to 'g').all.sliding(3).tp // Output ---------- ? ---------- ~(a, b, c) ~(b, c, d) ~(c, d, e) ~(d, e, f) ~(e, f, g) ----------
Example: group size 4 with step 2
('a' to 'g').all.sliding(4,2).tp // Output ------------- ? ------------- ~(a, b, c, d) ~(c, d, e, f) ~(e, f, g) -------------
-
def
sort(implicit c: Ordering[Short]): Stream[Short]
Sort
- def sort(c: Comparator[Short]): ~[Short]
-
def
sortBy[B, C, D](f1: Mapping[Short, B], f2: Mapping[Short, C], f3: Mapping[Short, D])(implicit arg0: Ordering[B], arg1: Ordering[C], arg2: Ordering[D]): Stream[Short]
Sort by three properties
-
def
sortBy[B, C](f1: Mapping[Short, B], f2: Mapping[Short, C])(implicit arg0: Ordering[B], arg1: Ordering[C]): Stream[Short]
Sort by two properties
-
def
sortBy[B](f: Mapping[Short, B])(implicit c: Ordering[B]): Stream[Short]
Sort by property
-
def
sortedOpt: Opt[Comparator[Short]]
Current sorted option
Current sorted option
By default sortedOpt returns Opt.Void
If custom Stream is sorted, return the Ordering
- Definition Classes
- Defaults
-
def
sortReversed(implicit c: Ordering[Short]): Stream[Short]
Sort reversed
-
def
sum(implicit n: Numeric[Short]): Short
Sum
Sum
Computes sum value of all elements
(0 to 1000).all.sum // Returns: 500500
- Definition Classes
- _Class → _aggregate → _aggregate
-
def
sumFew[B](f: Mapping[Short, B]*)(implicit arg0: Numeric[B]): Seq[B] with Util.Able.Void
Multi sum
Multi sum
Simultaneously computes multiple sum values for properties specified by several functions
Returns Seq, with values corresponding to the given mappings
For empty pipelines returned Seq will still hold zero numerics, but will test isVoid==true
(1 to 1000).all.sumFew(v => v, _ * 10, _ * 100).all.lp // Prints ~(500500, 5005000, 50050000)
- Definition Classes
- _Class → _aggregate
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
take(cnt: Int): Stream[Short]
Group iterate
Group iterate
Iterates through several elements at once and returns them as different Stream
Note. letNext is somewhat similar and more efficient, because it is lazy
val s : ~[Int] = 1 to 30 s.take(5).lp // Prints ~(1, 2, 3, 4, 5) s.take(12).lp // Prints ~(6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17) s.take(7).lp // Prints ~(18, 19, 20, 21, 22, 23, 24) // Print leftovers s.lp // Prints ~(25, 26, 27, 28, 29, 30)
-
def
takeWhile(f: Filter[Short]): ~[Short]
Stream while
-
def
to[TRGT[_]](implicit c: Stream.Interface.To[TRGT]): TRGT[Short]
Convert to type
Convert to type
Converts pipeline elements to the specified target type
Available targets are defined in Stream.Interface.To$
- Idx
- Idx.Immutable
- Idx.Buffer
- Refs
- scala.List
- scala.Seq
- scala.IndexedSeq
- scala.Iterator
- scala.Vector
- scala.collection.mutable.Buffer
- java.util.Collection
- java.util.List
- java.util.Iterator
- java.util.Spliterator
- java.util.stream.Stream
~~("1", "2", "3").to[Idx] // Returns: Idx[String] ('A' to 'D').all.to[List] // Returns: scala.List[Char] (1 to 5).all.to[java.util.List] // Returns: java.util.List[Int]
New target conversions can be implemented by creating implicit object extending Stream.Interface.To
-
def
toArray(implicit ct: ClassTag[Short]): Array[Short]
Convert to Array
-
def
toInfo: Util.Info
Metadata as String
Metadata as String
val info = ('a' to 'z').all.map(_.toUpper).toInfo println(info) // Prints scalqa.Stream.Z.extend.map.map$rawRaw$9{type=Chars,size=26}
- Definition Classes
- _Trait
-
def
toInfoTree: Util.Info.Tree
Metadata for all transformations
Metadata for all transformations
val tree = ('a' to 'z').all.let(_ > 'X').map(_.toUpper).toInfoTree println(tree) // Output scalqa.Stream.Z.extend.map.map$rawRaw$9{type=Chars} scalqa.Stream.Z.extend.filter.let$$anon$2{type=Chars} scalqa.Stream.Z.adapt.IndexedSeq$$anon$2{type=Chars,size=26,indexed}
- Definition Classes
- _Trait
-
def
toMemory: ~[Short]
New buffered stream
-
def
toRaw[TRGT](implicit c: Stream.Interface.ToRaw[Short, TRGT]): TRGT
Convert to immutable collection
-
def
toString(): String
Elements as String
-
def
toString(name: String): String
Elements as String
Elements as String
Returns String starting with given
name
and containing all elements separated by ", "(1 to 5).all.toString("My Ints") // Returns My Ints(1, 2, 3, 4, 5)
- Definition Classes
- _convert
-
def
toText: String
Elements as multi-line String
Elements as multi-line String
Returns all elements as String formatted table
If elements implement Util.Able.ToInfo, each 'info' property value is placed in a different column
If elements implement
scala.Product
(like all Tuples), each Product element is placed in a different column('a' to 'e').all.map(v => (v + "1", v + "2", v + "3", v + "4", v + "5")) tp // Output -- -- -- -- -- ? ? ? ? ? -- -- -- -- -- a1 a2 a3 a4 a5 b1 b2 b3 b4 b5 c1 c2 c3 c4 c5 d1 d2 d3 d4 d5 e1 e2 e3 e4 e5 -- -- -- -- --
-
def
tp: Unit
Text print
Text print
Is equivalent to:
println(this.toText)
For example:
('a' to 'd').all.zipIdx.tp // Output - - ? ? - - 0 a 1 b 2 c 3 d - -
- Definition Classes
- _convert
-
def
transpose[B](implicit f: Mapping[Short, ~[B]]): Stream[~[B]]
Transpose
Transpose
Transposes matrix where rows become columns
def all: ~[~[Int]] = ~~(11 to 15, List(21, 22, 23, 24, 25), Vector(31, 32, 33, 34, 35)) all.tp all.transpose.tp // Output --------------------- ? --------------------- ~(11, 12, 13, 14, 15) ~(21, 22, 23, 24, 25) ~(31, 32, 33, 34, 35) --------------------- ------------- ? ------------- ~(11, 21, 31) ~(12, 22, 32) ~(13, 23, 33) ~(14, 24, 34) ~(15, 25, 35) -------------
-
def
triggerEmpty(f: ⇒ Unit): ~[Short]
Run for empty
-
def
triggerEvery(s: Double, f: (Int, Double) ⇒ Unit): ~[Short]
Run on timer
Run on timer
Runs given function every specified time period in fractional seconds, while elements are being pumped
Note, it will not run even once if all elements are pumped in less than the given number of seconds
- f
function with cumulative element count and cumulative time in seconds as arguments
-
def
triggerFirst(f: ⇒ Any): ~[Short]
Runs before first
-
def
triggerLast(f: (Int, Double) ⇒ Unit): ~[Short]
Runs after last
-
def
typeOpt: Opt[Util.Specialized.Type]
Data type option
Data type option
By default typeOpt returns Opt.Void
All customized bases (like Stream.A.Chars) return appropriate type
-
def
unequalOpt(that: ~[Short], firstIndex: Int = 0, check: (Short, Short) ⇒ Boolean = _ == _): Opt[String]
Unequal check
Unequal check
Pumps both streams and compares all corresponding elements
When first not equal pair is found, message is returned
If all elements are equal, Opt.Void is returned
(0 to 10).all unequalOpt (0 to 10) // Prints: Opt.Void (0 to 10).all unequalOpt (0 until 10) // Prints: Opt(First has more elements) (0 to 5).all + 7 + 8 unequalOpt (0 to 10) // Prints: Opt(Fail at index 6: 7 != 6)
- firstIndex
- start of element indexing for error messages
- check
is the function to compare two elements
-
def
unfold(f: Mapping[~[Short], Short]): Stream[Short]
Lazy infinite stream
Lazy infinite stream
Lazily unfolds next value with a function taking all prior values
// Unfolding Fibonacci Sequence (0 to 1).all.unfold(_.letLast(2).sum).letNext(20).lp // Output ~(0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 4181)
-
def
unzip[B, C](implicit f: (Short) ⇒ (B, C)): (Stream[B], Stream[C])
Unzips stream in two
Unzips stream in two
val pairs = ('a' to 'g').all.zipMap(_.upper).to[Idx] pairs.all.lp // Prints ~((a,A), (b,B), (c,C), (d,D), (e,E), (f,F), (g,G)) val (left, right) = pairs.all.unzip left.all.lp // Prints ~(a, b, c, d, e, f, g) right.all.lp // Prints ~(G, F, E, D, C, B, A)
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
def
zip[B](that: ~[B]): Stream[(Short, B)]
Zip that
-
def
zipAll[B](that: ~[B], thisDflt: Opt[Short], thatDflt: Opt[B]): Stream[(Short, B)]
Merge stream
Merge stream
Merges this and that streams, creating Tuples for corresponding elements
('a' to 'f').all.zipAll('A' to 'H', '?', '?').lp // Output ~((a,A), (b,B), (c,C), (d,D), (e,E), (f,F), (?,G), (?,H))
-
def
zipFoldAs[B](start: B)(f: Folding.As[B, Short]): Stream[(Short, B)]
Merges current folding value
-
def
zipIdx(start: Int): Stream[(Int, Short)]
Merge index
-
def
zipIdx: Stream[(Int, Short)]
Merge index
-
def
zipMap[B](f: Mapping[Short, B]): Stream[(Short, B)]
Merge property
-
def
zipNext: Stream[(Short, Opt[Short])]
Merge with next
-
def
zipPrior: Stream[(Opt[Short], Short)]
Merge with prior
Operator
Hidden
-
abstract
def
_apply(i: Int): Short
- Attributes
- protected
- Definition Classes
- Indexed
-
def
_consumeIndexed(f: Consumer[Short]): Unit
- Attributes
- protected
- Definition Classes
- _Class
-
def
_pumpIndexed: Short
- Attributes
- protected
- Definition Classes
- _Class
-
abstract
def
_size: Int
- Attributes
- protected
- Definition Classes
- Indexed