提交 07c71753 编写于 作者: B briangoetz

8015318: Extend Collector with 'finish' operation

Reviewed-by: mduigou
Contributed-by: brian.goetz@oracle.com
上级 fcb44dca
...@@ -25,6 +25,7 @@ ...@@ -25,6 +25,7 @@
package java.util; package java.util;
import java.util.function.DoubleConsumer; import java.util.function.DoubleConsumer;
import java.util.stream.Collector;
/** /**
* A state object for collecting statistics such as count, min, max, sum, and * A state object for collecting statistics such as count, min, max, sum, and
...@@ -40,19 +41,19 @@ import java.util.function.DoubleConsumer; ...@@ -40,19 +41,19 @@ import java.util.function.DoubleConsumer;
* }</pre> * }</pre>
* *
* <p>{@code DoubleSummaryStatistics} can be used as a * <p>{@code DoubleSummaryStatistics} can be used as a
* {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} * {@linkplain java.util.stream.Stream#collect(Collector) reduction}
* target for a {@linkplain java.util.stream.Stream stream}. For example: * target for a {@linkplain java.util.stream.Stream stream}. For example:
* *
* <pre> {@code * <pre> {@code
* DoubleSummaryStatistics stats = people.stream() * DoubleSummaryStatistics stats = people.stream()
* .collect(Collectors.toDoubleSummaryStatistics(Person::getWeight)); * .collect(Collectors.summarizingDouble(Person::getWeight));
*}</pre> *}</pre>
* *
* This computes, in a single pass, the count of people, as well as the minimum, * This computes, in a single pass, the count of people, as well as the minimum,
* maximum, sum, and average of their weights. * maximum, sum, and average of their weights.
* *
* @implNote This implementation is not thread safe. However, it is safe to use * @implNote This implementation is not thread safe. However, it is safe to use
* {@link java.util.stream.Collectors#toDoubleSummaryStatistics(java.util.function.ToDoubleFunction) * {@link java.util.stream.Collectors#summarizingDouble(java.util.function.ToDoubleFunction)
* Collectors.toDoubleStatistics()} on a parallel stream, because the parallel * Collectors.summarizingDouble()} on a parallel stream, because the parallel
* implementation of {@link java.util.stream.Stream#collect Stream.collect()} * implementation of {@link java.util.stream.Stream#collect Stream.collect()}
* provides the necessary partitioning, isolation, and merging of results for * provides the necessary partitioning, isolation, and merging of results for
...@@ -152,7 +153,7 @@ public class DoubleSummaryStatistics implements DoubleConsumer { ...@@ -152,7 +153,7 @@ public class DoubleSummaryStatistics implements DoubleConsumer {
} }
/** /**
* Returns the average of values recorded, or zero if no values have been * Returns the arithmetic mean of values recorded, or zero if no values have been
* recorded. The average returned can vary depending upon the order in * recorded. The average returned can vary depending upon the order in
* which values are recorded. This is due to accumulated rounding error in * which values are recorded. This is due to accumulated rounding error in
* addition of values of differing magnitudes. Values sorted by increasing * addition of values of differing magnitudes. Values sorted by increasing
...@@ -160,7 +161,7 @@ public class DoubleSummaryStatistics implements DoubleConsumer { ...@@ -160,7 +161,7 @@ public class DoubleSummaryStatistics implements DoubleConsumer {
* value is a {@code NaN} or the sum is at any point a {@code NaN} then the * value is a {@code NaN} or the sum is at any point a {@code NaN} then the
* average will be {@code NaN}. * average will be {@code NaN}.
* *
* @return the average of values, or zero if none * @return the arithmetic mean of values, or zero if none
*/ */
public final double getAverage() { public final double getAverage() {
return getCount() > 0 ? getSum() / getCount() : 0.0d; return getCount() > 0 ? getSum() / getCount() : 0.0d;
......
...@@ -25,6 +25,7 @@ ...@@ -25,6 +25,7 @@
package java.util; package java.util;
import java.util.function.IntConsumer; import java.util.function.IntConsumer;
import java.util.stream.Collector;
/** /**
* A state object for collecting statistics such as count, min, max, sum, and * A state object for collecting statistics such as count, min, max, sum, and
...@@ -40,19 +41,19 @@ import java.util.function.IntConsumer; ...@@ -40,19 +41,19 @@ import java.util.function.IntConsumer;
* }</pre> * }</pre>
* *
* <p>{@code IntSummaryStatistics} can be used as a * <p>{@code IntSummaryStatistics} can be used as a
* {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} * {@linkplain java.util.stream.Stream#collect(Collector) reduction}
* target for a {@linkplain java.util.stream.Stream stream}. For example: * target for a {@linkplain java.util.stream.Stream stream}. For example:
* *
* <pre> {@code * <pre> {@code
* IntSummaryStatistics stats = people.stream() * IntSummaryStatistics stats = people.stream()
* .collect(Collectors.toIntSummaryStatistics(Person::getDependents)); * .collect(Collectors.summarizingInt(Person::getDependents));
*}</pre> *}</pre>
* *
* This computes, in a single pass, the count of people, as well as the minimum, * This computes, in a single pass, the count of people, as well as the minimum,
* maximum, sum, and average of their number of dependents. * maximum, sum, and average of their number of dependents.
* *
* @implNote This implementation is not thread safe. However, it is safe to use * @implNote This implementation is not thread safe. However, it is safe to use
* {@link java.util.stream.Collectors#toIntSummaryStatistics(java.util.function.ToIntFunction) * {@link java.util.stream.Collectors#summarizingInt(java.util.function.ToIntFunction)
* Collectors.toIntStatistics()} on a parallel stream, because the parallel * Collectors.summarizingInt()} on a parallel stream, because the parallel
* implementation of {@link java.util.stream.Stream#collect Stream.collect()} * implementation of {@link java.util.stream.Stream#collect Stream.collect()}
* provides the necessary partitioning, isolation, and merging of results for * provides the necessary partitioning, isolation, and merging of results for
...@@ -140,10 +141,10 @@ public class IntSummaryStatistics implements IntConsumer { ...@@ -140,10 +141,10 @@ public class IntSummaryStatistics implements IntConsumer {
} }
/** /**
* Returns the average of values recorded, or zero if no values have been * Returns the arithmetic mean of values recorded, or zero if no values have been
* recorded. * recorded.
* *
* @return the average of values, or zero if none * @return the arithmetic mean of values, or zero if none
*/ */
public final double getAverage() { public final double getAverage() {
return getCount() > 0 ? (double) getSum() / getCount() : 0.0d; return getCount() > 0 ? (double) getSum() / getCount() : 0.0d;
......
...@@ -26,6 +26,7 @@ package java.util; ...@@ -26,6 +26,7 @@ package java.util;
import java.util.function.IntConsumer; import java.util.function.IntConsumer;
import java.util.function.LongConsumer; import java.util.function.LongConsumer;
import java.util.stream.Collector;
/** /**
* A state object for collecting statistics such as count, min, max, sum, and * A state object for collecting statistics such as count, min, max, sum, and
...@@ -41,19 +42,19 @@ import java.util.function.LongConsumer; ...@@ -41,19 +42,19 @@ import java.util.function.LongConsumer;
* }</pre> * }</pre>
* *
* <p>{@code LongSummaryStatistics} can be used as a * <p>{@code LongSummaryStatistics} can be used as a
* {@linkplain java.util.stream.Stream#reduce(java.util.function.BinaryOperator) reduction} * {@linkplain java.util.stream.Stream#collect(Collector) reduction}
* target for a {@linkplain java.util.stream.Stream stream}. For example: * target for a {@linkplain java.util.stream.Stream stream}. For example:
* *
* <pre> {@code * <pre> {@code
* LongSummaryStatistics stats = people.stream() * LongSummaryStatistics stats = people.stream()
* .collect(Collectors.toLongSummaryStatistics(Person::getAge)); * .collect(Collectors.summarizingLong(Person::getAge));
*}</pre> *}</pre>
* *
* This computes, in a single pass, the count of people, as well as the minimum, * This computes, in a single pass, the count of people, as well as the minimum,
* maximum, sum, and average of their ages in milliseconds. * maximum, sum, and average of their ages.
* *
* @implNote This implementation is not thread safe. However, it is safe to use * @implNote This implementation is not thread safe. However, it is safe to use
* {@link java.util.stream.Collectors#toLongSummaryStatistics(java.util.function.ToLongFunction) * {@link java.util.stream.Collectors#summarizingLong(java.util.function.ToLongFunction)
* Collectors.toLongStatistics()} on a parallel stream, because the parallel * Collectors.summarizingLong()} on a parallel stream, because the parallel
* implementation of {@link java.util.stream.Stream#collect Stream.collect()} * implementation of {@link java.util.stream.Stream#collect Stream.collect()}
* provides the necessary partitioning, isolation, and merging of results for * provides the necessary partitioning, isolation, and merging of results for
...@@ -152,10 +153,10 @@ public class LongSummaryStatistics implements LongConsumer, IntConsumer { ...@@ -152,10 +153,10 @@ public class LongSummaryStatistics implements LongConsumer, IntConsumer {
} }
/** /**
* Returns the average of values recorded, or zero if no values have been * Returns the arithmetic mean of values recorded, or zero if no values have been
* recorded. * recorded.
* *
* @return The average of values, or zero if none * @return The arithmetic mean of values, or zero if none
*/ */
public final double getAverage() { public final double getAverage() {
return getCount() > 0 ? (double) getSum() / getCount() : 0.0d; return getCount() > 0 ? (double) getSum() / getCount() : 0.0d;
......
...@@ -49,16 +49,17 @@ package java.util; ...@@ -49,16 +49,17 @@ package java.util;
* <p> * <p>
* A {@code StringJoiner} may be employed to create formatted output from a * A {@code StringJoiner} may be employed to create formatted output from a
* {@link java.util.stream.Stream} using * {@link java.util.stream.Stream} using
* {@link java.util.stream.Collectors#toStringJoiner}. For example: * {@link java.util.stream.Collectors#joining(CharSequence)}. For example:
* *
* <pre> {@code * <pre> {@code
* List<Integer> numbers = Arrays.asList(1, 2, 3, 4); * List<Integer> numbers = Arrays.asList(1, 2, 3, 4);
* String commaSeparatedNumbers = numbers.stream() * String commaSeparatedNumbers = numbers.stream()
* .map(i -> i.toString()) * .map(i -> i.toString())
* .collect(Collectors.toStringJoiner(", ")).toString(); * .collect(Collectors.joining(", "));
* }</pre> * }</pre>
* *
* @see java.util.stream.Collectors#toStringJoiner * @see java.util.stream.Collectors#joining(CharSequence)
* @see java.util.stream.Collectors#joining(CharSequence, CharSequence, CharSequence)
* @since 1.8 * @since 1.8
*/ */
public final class StringJoiner { public final class StringJoiner {
......
...@@ -25,40 +25,45 @@ ...@@ -25,40 +25,45 @@
package java.util.stream; package java.util.stream;
import java.util.Collections; import java.util.Collections;
import java.util.EnumSet;
import java.util.Set; import java.util.Set;
import java.util.function.BiFunction; import java.util.function.BiConsumer;
import java.util.function.BinaryOperator; import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier; import java.util.function.Supplier;
/** /**
* A <a href="package-summary.html#Reduction">reduction operation</a> that * A <a href="package-summary.html#Reduction">reduction operation</a> that
* supports folding input elements into a cumulative result. The result may be * folds input elements into a mutable result container, optionally transforming
* a value or may be a mutable result container. Examples of operations * the accumulated result into a final representation after all input elements
* accumulating results into a mutable result container include: accumulating * have been processed.
* input elements into a {@code Collection}; concatenating strings into a *
* {@code StringBuilder}; computing summary information about elements such as * <p>Examples of mutable reduction operations include:
* sum, min, max, or average; computing "pivot table" summaries such as "maximum * accumulating elements into a {@code Collection}; concatenating
* valued transaction by seller", etc. Reduction operations can be performed * strings using a {@code StringBuilder}; computing summary information about
* either sequentially or in parallel. * elements such as sum, min, max, or average; computing "pivot table" summaries
* such as "maximum valued transaction by seller", etc. Reduction operations
* can be performed either sequentially or in parallel.
* *
* <p>The following are examples of using the predefined {@code Collector} * <p>The following are examples of using the predefined {@code Collector}
* implementations in {@link Collectors} with the {@code Stream} API to perform * implementations in {@link Collectors} with the {@code Stream} API to perform
* mutable reduction tasks: * mutable reduction tasks:
* <pre>{@code * <pre>{@code
* // Accumulate elements into a List * // Accumulate names into a List
* List<String> list = stream.collect(Collectors.toList()); * List<String> list = people.stream().map(Person::getName).collect(Collectors.toList());
* *
* // Accumulate elements into a TreeSet * // Accumulate names into a TreeSet
* Set<String> list = stream.collect(Collectors.toCollection(TreeSet::new)); * Set<String> list = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new));
* *
* // Convert elements to strings and concatenate them, separated by commas * // Convert elements to strings and concatenate them, separated by commas
* String joined = stream.map(Object::toString) * String joined = things.stream()
* .collect(Collectors.toStringJoiner(", ")) * .map(Object::toString)
* .toString(); * .collect(Collectors.joining(", "));
* *
* // Find highest-paid employee * // Find highest-paid employee
* Employee highestPaid = employees.stream() * Employee highestPaid = employees.stream()
* .collect(Collectors.maxBy(Comparators.comparing(Employee::getSalary))); * .collect(Collectors.maxBy(Comparators.comparing(Employee::getSalary)))
* .get();
* *
* // Group employees by department * // Group employees by department
* Map<Department, List<Employee>> byDept * Map<Department, List<Employee>> byDept
...@@ -66,7 +71,7 @@ import java.util.function.Supplier; ...@@ -66,7 +71,7 @@ import java.util.function.Supplier;
* .collect(Collectors.groupingBy(Employee::getDepartment)); * .collect(Collectors.groupingBy(Employee::getDepartment));
* *
* // Find highest-paid employee by department * // Find highest-paid employee by department
* Map<Department, Employee> highestPaidByDept * Map<Department, Optional<Employee>> highestPaidByDept
* = employees.stream() * = employees.stream()
* .collect(Collectors.groupingBy(Employee::getDepartment, * .collect(Collectors.groupingBy(Employee::getDepartment,
* Collectors.maxBy(Comparators.comparing(Employee::getSalary)))); * Collectors.maxBy(Comparators.comparing(Employee::getSalary))));
...@@ -74,43 +79,42 @@ import java.util.function.Supplier; ...@@ -74,43 +79,42 @@ import java.util.function.Supplier;
* // Partition students into passing and failing * // Partition students into passing and failing
* Map<Boolean, List<Student>> passingFailing = * Map<Boolean, List<Student>> passingFailing =
* students.stream() * students.stream()
* .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD); * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD));
* *
* }</pre> * }</pre>
* *
* <p>A {@code Collector} is specified by three functions that work together to * <p>A {@code Collector} is specified by four functions that work together to
* manage a result or result container. They are: creation of an initial * accumulate entries into a mutable result container, and optionally perform
* result, incorporating a new data element into a result, and combining two * a final transform on the result. They are: creation of a new result container,
* results into one. The last function -- combining two results into one -- is * incorporating a new data element into a result container, combining two
* used during parallel operations, where subsets of the input are accumulated * result containers into one, and performing a final transform on the container.
* in parallel, and then the subresults merged into a combined result. The * The combiner function is used during parallel operations, where
* result may be a mutable container or a value. If the result is mutable, the * subsets of the input are accumulated into separate result
* accumulation and combination functions may either mutate their left argument * containers, and then the subresults merged into a combined result. The
* and return that (such as adding elements to a collection), or return a new * combiner function may merge one set of subresults into the other and return
* result, in which case it should not perform any mutation. * that, or it may return a new object to describe the combined results.
* *
* <p>Collectors also have a set of characteristics, including * <p>Collectors also have a set of characteristics, such as
* {@link Characteristics#CONCURRENT} and * {@link Characteristics#CONCURRENT}. These characteristics provide
* {@link Characteristics#STRICTLY_MUTATIVE}. These characteristics provide
* hints that can be used by a reduction implementation to provide better * hints that can be used by a reduction implementation to provide better
* performance. * performance.
* *
* <p>Libraries that implement reduction based on {@code Collector}, such as * <p>Libraries that implement reduction based on {@code Collector}, such as
* {@link Stream#collect(Collector)}, must adhere to the following constraints: * {@link Stream#collect(Collector)}, must adhere to the following constraints:
* <ul> * <ul>
* <li>The first argument passed to the accumulator function, and both * <li>The first argument passed to the accumulator function, both
* arguments passed to the combiner function, must be the result of a * arguments passed to the combiner function, and the argument passed to the
* previous invocation of {@link #resultSupplier()}, {@link #accumulator()}, * finisher function must be the result of a previous invocation of the
* or {@link #combiner()}.</li> * result supplier, accumulator, or combiner functions.</li>
* <li>The implementation should not do anything with the result of any of * <li>The implementation should not do anything with the result of any of
* the result supplier, accumulator, or combiner functions other than to * the result supplier, accumulator, or combiner functions other than to
* pass them again to the accumulator or combiner functions, or return them * pass them again to the accumulator, combiner, or finisher functions,
* to the caller of the reduction operation.</li> * or return them to the caller of the reduction operation.</li>
* <li>If a result is passed to the accumulator or combiner function, and * <li>If a result is passed to the combiner or finisher
* the same object is not returned from that function, it is never used * function, and the same object is not returned from that function, it is
* again.</li> * never used again.</li>
* <li>Once a result is passed to the combiner function, it is never passed * <li>Once a result is passed to the combiner or finisher function, it
* to the accumulator function again.</li> * is never passed to the accumulator function again.</li>
* <li>For non-concurrent collectors, any result returned from the result * <li>For non-concurrent collectors, any result returned from the result
* supplier, accumulator, or combiner functions must be serially * supplier, accumulator, or combiner functions must be serially
* thread-confined. This enables collection to occur in parallel without * thread-confined. This enables collection to occur in parallel without
...@@ -132,11 +136,10 @@ import java.util.function.Supplier; ...@@ -132,11 +136,10 @@ import java.util.function.Supplier;
* Performing a reduction operation with a {@code Collector} should produce a * Performing a reduction operation with a {@code Collector} should produce a
* result equivalent to: * result equivalent to:
* <pre>{@code * <pre>{@code
* BiFunction<R,T,R> accumulator = collector.accumulator(); * R container = collector.supplier().get();
* R result = collector.resultSupplier().get();
* for (T t : data) * for (T t : data)
* result = accumulator.apply(result, t); * collector.accumulator().accept(container, t);
* return result; * return collector.finisher().apply(container);
* }</pre> * }</pre>
* *
* <p>However, the library is free to partition the input, perform the reduction * <p>However, the library is free to partition the input, perform the reduction
...@@ -149,7 +152,7 @@ import java.util.function.Supplier; ...@@ -149,7 +152,7 @@ import java.util.function.Supplier;
* is accumulating elements into a {@code TreeSet}. In this case, the {@code * is accumulating elements into a {@code TreeSet}. In this case, the {@code
* resultSupplier()} function is {@code () -> new Treeset<T>()}, the * supplier()} function is {@code () -> new TreeSet<T>()}, the
* {@code accumulator} function is * {@code accumulator} function is
* {@code (set, element) -> { set.add(element); return set; }}, and the combiner * {@code (set, element) -> set.add(element) }, and the combiner
* function is {@code (left, right) -> { left.addAll(right); return left; }}. * function is {@code (left, right) -> { left.addAll(right); return left; }}.
* (This behavior is implemented by * (This behavior is implemented by
* {@code Collectors.toCollection(TreeSet::new)}). * {@code Collectors.toCollection(TreeSet::new)}).
...@@ -159,51 +162,49 @@ import java.util.function.Supplier; ...@@ -159,51 +162,49 @@ import java.util.function.Supplier;
* @see Stream#collect(Collector) * @see Stream#collect(Collector)
* @see Collectors * @see Collectors
* *
* @param <T> the type of input element to the collect operation * @param <T> the type of input elements to the reduction operation
* @param <R> the result type of the collect operation * @param <A> the mutable accumulation type of the reduction operation (often
* hidden as an implementation detail)
* @param <R> the result type of the reduction operation
* @since 1.8 * @since 1.8
*/ */
public interface Collector<T, R> { public interface Collector<T, A, R> {
/** /**
* A function that creates and returns a new result that represents * A function that creates and returns a new mutable result container.
* "no values". If the accumulator or combiner functions may mutate their
* arguments, this must be a new, empty result container.
* *
* @return a function which, when invoked, returns a result representing * @return a function which returns a new, mutable result container
* "no values"
*/ */
Supplier<R> resultSupplier(); Supplier<A> supplier();
/** /**
* A function that folds a new value into a cumulative result. The result * A function that folds a new value into a mutable result container.
* may be a mutable result container or a value. The accumulator function
* may modify a mutable container and return it, or create a new result and
* return that, but if it returns a new result object, it must not modify
* any of its arguments.
*
* <p>If the collector has the {@link Characteristics#STRICTLY_MUTATIVE}
* characteristic, then the accumulator function <em>must</em> always return
* its first argument, after possibly mutating its state.
* *
* @return a function which folds a new value into a cumulative result * @return a function which folds a new value into a mutable result container
*/ */
BiFunction<R, T, R> accumulator(); BiConsumer<A, T> accumulator();
/** /**
* A function that accepts two partial results and merges them. The * A function that accepts two partial results and merges them. The
* combiner function may fold state from one argument into the other and * combiner function may fold state from one argument into the other and
* return that, or may return a new result object, but if it returns * return that, or may return a new result object.
* a new result object, it must not modify the state of either of its
* arguments.
*
* <p>If the collector has the {@link Characteristics#STRICTLY_MUTATIVE}
* characteristic, then the combiner function <em>must</em> always return
* its first argument, after possibly mutating its state.
* *
* @return a function which combines two partial results into a cumulative * @return a function which combines two partial results into a cumulative
* result * result
*/ */
BinaryOperator<R> combiner(); BinaryOperator<A> combiner();
/**
* Perform the final transformation from the intermediate accumulation type
* {@code A} to the final result representation {@code R}.
*
* <p>If the characteristic {@code IDENTITY_TRANSFORM} is
* set, this function may be presumed to be an identity transform with an
* unchecked cast from {@code A} to {@code R}.
*
* @return a function which transforms the intermediate result to the final
* result
*/
Function<A, R> finisher();
/** /**
* Returns a {@code Set} of {@code Collector.Characteristics} indicating * Returns a {@code Set} of {@code Collector.Characteristics} indicating
...@@ -213,6 +214,62 @@ public interface Collector<T, R> { ...@@ -213,6 +214,62 @@ public interface Collector<T, R> {
*/ */
Set<Characteristics> characteristics(); Set<Characteristics> characteristics();
/**
* Returns a new {@code Collector} described by the given {@code supplier},
* {@code accumulator}, and {@code combiner} functions. The resulting
* {@code Collector} has the {@code Collector.Characteristics.IDENTITY_FINISH}
* characteristic.
*
* @param supplier The supplier function for the new collector
* @param accumulator The accumulator function for the new collector
* @param combiner The combiner function for the new collector
* @param characteristics The collector characteristics for the new
* collector
* @param <T> The type of input elements for the new collector
* @param <R> The type of intermediate accumulation result, and final result,
* for the new collector
* @return the new {@code Collector}
*/
public static<T, R> Collector<T, R, R> of(Supplier<R> supplier,
BiConsumer<R, T> accumulator,
BinaryOperator<R> combiner,
Characteristics... characteristics) {
Set<Characteristics> cs = (characteristics.length == 0)
? Collectors.CH_ID
: Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH,
characteristics));
return new Collectors.CollectorImpl<>(supplier, accumulator, combiner, cs);
}
/**
* Returns a new {@code Collector} described by the given {@code supplier},
* {@code accumulator}, {@code combiner}, and {@code finisher} functions.
*
* @param supplier The supplier function for the new collector
* @param accumulator The accumulator function for the new collector
* @param combiner The combiner function for the new collector
* @param finisher The finisher function for the new collector
* @param characteristics The collector characteristics for the new
* collector
* @param <T> The type of input elements for the new collector
* @param <A> The intermediate accumulation type of the new collector
* @param <R> The final result type of the new collector
* @return the new {@code Collector}
*/
public static<T, A, R> Collector<T, A, R> of(Supplier<A> supplier,
BiConsumer<A, T> accumulator,
BinaryOperator<A> combiner,
Function<A, R> finisher,
Characteristics... characteristics) {
Set<Characteristics> cs = Collectors.CH_NOID;
if (characteristics.length > 0) {
cs = EnumSet.noneOf(Characteristics.class);
Collections.addAll(cs, characteristics);
cs = Collections.unmodifiableSet(cs);
}
return new Collectors.CollectorImpl<>(supplier, accumulator, combiner, finisher, cs);
}
/** /**
* Characteristics indicating properties of a {@code Collector}, which can * Characteristics indicating properties of a {@code Collector}, which can
* be used to optimize reduction implementations. * be used to optimize reduction implementations.
...@@ -222,8 +279,7 @@ public interface Collector<T, R> { ...@@ -222,8 +279,7 @@ public interface Collector<T, R> {
* Indicates that this collector is <em>concurrent</em>, meaning that * Indicates that this collector is <em>concurrent</em>, meaning that
* the result container can support the accumulator function being * the result container can support the accumulator function being
* called concurrently with the same result container from multiple * called concurrently with the same result container from multiple
* threads. Concurrent collectors must also always have the * threads.
* {@code STRICTLY_MUTATIVE} characteristic.
* *
* <p>If a {@code CONCURRENT} collector is not also {@code UNORDERED}, * <p>If a {@code CONCURRENT} collector is not also {@code UNORDERED},
* then it should only be evaluated concurrently if applied to an * then it should only be evaluated concurrently if applied to an
...@@ -238,12 +294,10 @@ public interface Collector<T, R> { ...@@ -238,12 +294,10 @@ public interface Collector<T, R> {
UNORDERED, UNORDERED,
/** /**
* Indicates that this collector operates by strict mutation of its * Indicates that the finisher function is the identity function and
* result container. This means that the {@link #accumulator()} and * can be elided. If set, it must be the case that an unchecked cast
* {@link #combiner()} functions will always modify the state of and * from A to R will succeed.
* return their first argument, rather than returning a different result
* container.
*/ */
STRICTLY_MUTATIVE IDENTITY_FINISH
} }
} }
...@@ -27,6 +27,7 @@ package java.util.stream; ...@@ -27,6 +27,7 @@ package java.util.stream;
import java.util.AbstractMap; import java.util.AbstractMap;
import java.util.AbstractSet; import java.util.AbstractSet;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
...@@ -39,14 +40,16 @@ import java.util.Iterator; ...@@ -39,14 +40,16 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.LongSummaryStatistics; import java.util.LongSummaryStatistics;
import java.util.Map; import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.StringJoiner; import java.util.StringJoiner;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;
import java.util.function.BiConsumer;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.function.BinaryOperator; import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function; import java.util.function.Function;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.function.Supplier; import java.util.function.Supplier;
...@@ -64,20 +67,21 @@ import java.util.function.ToLongFunction; ...@@ -64,20 +67,21 @@ import java.util.function.ToLongFunction;
* mutable reduction tasks: * mutable reduction tasks:
* *
* <pre>{@code * <pre>{@code
* // Accumulate elements into a List * // Accumulate names into a List
* List<Person> list = people.collect(Collectors.toList()); * List<String> list = people.stream().map(Person::getName).collect(Collectors.toList());
* *
* // Accumulate elements into a TreeSet * // Accumulate names into a TreeSet
* List<Person> list = people.collect(Collectors.toCollection(TreeSet::new)); * Set<String> list = people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new));
* *
* // Convert elements to strings and concatenate them, separated by commas * // Convert elements to strings and concatenate them, separated by commas
* String joined = stream.map(Object::toString) * String joined = things.stream()
* .collect(Collectors.toStringJoiner(", ")) * .map(Object::toString)
* .toString(); * .collect(Collectors.joining(", "));
* *
* // Find highest-paid employee * // Find highest-paid employee
* Employee highestPaid = employees.stream() * Employee highestPaid = employees.stream()
* .collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary))); * .collect(Collectors.maxBy(Comparator.comparing(Employee::getSalary)))
* .get();
* *
* // Group employees by department * // Group employees by department
* Map<Department, List<Employee>> byDept * Map<Department, List<Employee>> byDept
...@@ -85,7 +89,7 @@ import java.util.function.ToLongFunction; ...@@ -85,7 +89,7 @@ import java.util.function.ToLongFunction;
* .collect(Collectors.groupingBy(Employee::getDepartment)); * .collect(Collectors.groupingBy(Employee::getDepartment));
* *
* // Find highest-paid employee by department * // Find highest-paid employee by department
* Map<Department, Employee> highestPaidByDept * Map<Department, Optional<Employee>> highestPaidByDept
* = employees.stream() * = employees.stream()
* .collect(Collectors.groupingBy(Employee::getDepartment, * .collect(Collectors.groupingBy(Employee::getDepartment,
* Collectors.maxBy(Comparator.comparing(Employee::getSalary)))); * Collectors.maxBy(Comparator.comparing(Employee::getSalary))));
...@@ -93,7 +97,7 @@ import java.util.function.ToLongFunction; ...@@ -93,7 +97,7 @@ import java.util.function.ToLongFunction;
* // Partition students into passing and failing * // Partition students into passing and failing
* Map<Boolean, List<Student>> passingFailing = * Map<Boolean, List<Student>> passingFailing =
* students.stream() * students.stream()
* .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD); * .collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD));
* *
* }</pre> * }</pre>
* *
...@@ -103,15 +107,19 @@ import java.util.function.ToLongFunction; ...@@ -103,15 +107,19 @@ import java.util.function.ToLongFunction;
*/ */
public final class Collectors { public final class Collectors {
private static final Set<Collector.Characteristics> CH_CONCURRENT static final Set<Collector.Characteristics> CH_CONCURRENT_ID
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT,
Collector.Characteristics.UNORDERED,
Collector.Characteristics.IDENTITY_FINISH));
static final Set<Collector.Characteristics> CH_CONCURRENT_NOID
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT, = Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.CONCURRENT,
Collector.Characteristics.STRICTLY_MUTATIVE,
Collector.Characteristics.UNORDERED));
private static final Set<Collector.Characteristics> CH_STRICT
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE));
private static final Set<Collector.Characteristics> CH_STRICT_UNORDERED
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.STRICTLY_MUTATIVE,
Collector.Characteristics.UNORDERED)); Collector.Characteristics.UNORDERED));
static final Set<Collector.Characteristics> CH_ID
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH));
static final Set<Collector.Characteristics> CH_UNORDERED_ID
= Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.UNORDERED,
Collector.Characteristics.IDENTITY_FINISH));
static final Set<Collector.Characteristics> CH_NOID = Collections.emptySet();
private Collectors() { } private Collectors() { }
...@@ -124,87 +132,63 @@ public final class Collectors { ...@@ -124,87 +132,63 @@ public final class Collectors {
* *
* @param <T> the type of input arguments to the merge function * @param <T> the type of input arguments to the merge function
* @return a merge function which always throw {@code IllegalStateException} * @return a merge function which always throw {@code IllegalStateException}
*
* @see #firstWinsMerger()
* @see #lastWinsMerger()
*/ */
public static <T> BinaryOperator<T> throwingMerger() { private static <T> BinaryOperator<T> throwingMerger() {
return (u,v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return (u,v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); };
} }
/**
* Returns a merge function, suitable for use in
* {@link Map#merge(Object, Object, BiFunction) Map.merge()} or
* {@link #toMap(Function, Function, BinaryOperator) toMap()},
* which implements a "first wins" policy.
*
* @param <T> the type of input arguments to the merge function
* @return a merge function which always returns its first argument
* @see #lastWinsMerger()
* @see #throwingMerger()
*/
public static <T> BinaryOperator<T> firstWinsMerger() {
return (u,v) -> u;
}
/**
* Returns a merge function, suitable for use in
* {@link Map#merge(Object, Object, BiFunction) Map.merge()} or
* {@link #toMap(Function, Function, BinaryOperator) toMap()},
* which implements a "last wins" policy.
*
* @param <T> the type of input arguments to the merge function
* @return a merge function which always returns its second argument
* @see #firstWinsMerger()
* @see #throwingMerger()
*/
public static <T> BinaryOperator<T> lastWinsMerger() {
return (u,v) -> v;
}
/** /**
* Simple implementation class for {@code Collector}. * Simple implementation class for {@code Collector}.
* *
* @param <T> the type of elements to be collected * @param <T> the type of elements to be collected
* @param <R> the type of the result * @param <R> the type of the result
*/ */
private static final class CollectorImpl<T, R> implements Collector<T,R> { static class CollectorImpl<T, A, R> implements Collector<T, A, R> {
private final Supplier<R> resultSupplier; private final Supplier<A> supplier;
private final BiFunction<R, T, R> accumulator; private final BiConsumer<A, T> accumulator;
private final BinaryOperator<R> combiner; private final BinaryOperator<A> combiner;
private final Function<A, R> finisher;
private final Set<Characteristics> characteristics; private final Set<Characteristics> characteristics;
CollectorImpl(Supplier<R> resultSupplier, CollectorImpl(Supplier<A> supplier,
BiFunction<R, T, R> accumulator, BiConsumer<A, T> accumulator,
BinaryOperator<R> combiner, BinaryOperator<A> combiner,
Function<A,R> finisher,
Set<Characteristics> characteristics) { Set<Characteristics> characteristics) {
this.resultSupplier = resultSupplier; this.supplier = supplier;
this.accumulator = accumulator; this.accumulator = accumulator;
this.combiner = combiner; this.combiner = combiner;
this.finisher = finisher;
this.characteristics = characteristics; this.characteristics = characteristics;
} }
CollectorImpl(Supplier<R> resultSupplier, CollectorImpl(Supplier<A> supplier,
BiFunction<R, T, R> accumulator, BiConsumer<A, T> accumulator,
BinaryOperator<R> combiner) { BinaryOperator<A> combiner,
this(resultSupplier, accumulator, combiner, Collections.emptySet()); Set<Characteristics> characteristics) {
this(supplier, accumulator, combiner, i -> (R) i, characteristics);
} }
@Override @Override
public BiFunction<R, T, R> accumulator() { public BiConsumer<A, T> accumulator() {
return accumulator; return accumulator;
} }
@Override @Override
public Supplier<R> resultSupplier() { public Supplier<A> supplier() {
return resultSupplier; return supplier;
} }
@Override @Override
public BinaryOperator<R> combiner() { public BinaryOperator<A> combiner() {
return combiner; return combiner;
} }
@Override
public Function<A, R> finisher() {
return finisher;
}
@Override @Override
public Set<Characteristics> characteristics() { public Set<Characteristics> characteristics() {
return characteristics; return characteristics;
...@@ -224,11 +208,10 @@ public final class Collectors { ...@@ -224,11 +208,10 @@ public final class Collectors {
* {@code Collection}, in encounter order * {@code Collection}, in encounter order
*/ */
public static <T, C extends Collection<T>> public static <T, C extends Collection<T>>
Collector<T, C> toCollection(Supplier<C> collectionFactory) { Collector<T, ?, C> toCollection(Supplier<C> collectionFactory) {
return new CollectorImpl<>(collectionFactory, return new CollectorImpl<>(collectionFactory, Collection::add,
(r, t) -> { r.add(t); return r; },
(r1, r2) -> { r1.addAll(r2); return r1; }, (r1, r2) -> { r1.addAll(r2); return r1; },
CH_STRICT); CH_ID);
} }
/** /**
...@@ -241,36 +224,10 @@ public final class Collectors { ...@@ -241,36 +224,10 @@ public final class Collectors {
* {@code List}, in encounter order * {@code List}, in encounter order
*/ */
public static <T> public static <T>
Collector<T, List<T>> toList() { Collector<T, ?, List<T>> toList() {
BiFunction<List<T>, T, List<T>> accumulator = (list, t) -> { return new CollectorImpl<>((Supplier<List<T>>) ArrayList::new, List::add,
switch (list.size()) { (left, right) -> { left.addAll(right); return left; },
case 0: CH_ID);
return Collections.singletonList(t);
case 1:
List<T> newList = new ArrayList<>();
newList.add(list.get(0));
newList.add(t);
return newList;
default:
list.add(t);
return list;
}
};
BinaryOperator<List<T>> combiner = (left, right) -> {
switch (left.size()) {
case 0:
return right;
case 1:
List<T> newList = new ArrayList<>(left.size() + right.size());
newList.addAll(left);
newList.addAll(right);
return newList;
default:
left.addAll(right);
return left;
}
};
return new CollectorImpl<>(Collections::emptyList, accumulator, combiner);
} }
/** /**
...@@ -286,44 +243,58 @@ public final class Collectors { ...@@ -286,44 +243,58 @@ public final class Collectors {
* {@code Set} * {@code Set}
*/ */
public static <T> public static <T>
Collector<T, Set<T>> toSet() { Collector<T, ?, Set<T>> toSet() {
return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, return new CollectorImpl<>((Supplier<Set<T>>) HashSet::new, Set::add,
(r, t) -> { r.add(t); return r; }, (left, right) -> { left.addAll(right); return left; },
(r1, r2) -> { r1.addAll(r2); return r1; }, CH_UNORDERED_ID);
CH_STRICT_UNORDERED);
} }
/** /**
* Returns a {@code Collector} that concatenates the input elements into a * Returns a {@code Collector} that concatenates the input elements into a
* new {@link StringBuilder}. * {@code String}, in encounter order.
* *
* @return a {@code Collector} which collects String elements into a * @return a {@code Collector} that concatenates the input elements into a
* {@code StringBuilder}, in encounter order * {@code String}, in encounter order
*/ */
public static Collector<String, StringBuilder> toStringBuilder() { public static Collector<CharSequence, ?, String> joining() {
return new CollectorImpl<>(StringBuilder::new, return new CollectorImpl<CharSequence, StringBuilder, String>(
(r, t) -> { r.append(t); return r; }, StringBuilder::new, StringBuilder::append,
(r1, r2) -> { r1.append(r2); return r1; }, (r1, r2) -> { r1.append(r2); return r1; },
CH_STRICT); StringBuilder::toString, CH_NOID);
} }
/** /**
* Returns a {@code Collector} that concatenates the input elements into a * Returns a {@code Collector} that concatenates the input elements,
* new {@link StringJoiner}, using the specified delimiter. * separated by the specified delimiter, in encounter order.
* *
* @param delimiter the delimiter to be used between each element * @param delimiter the delimiter to be used between each element
* @return A {@code Collector} which collects String elements into a * @return A {@code Collector} which concatenates CharSequence elements,
* {@code StringJoiner}, in encounter order * separated by the specified delimiter, in encounter order
*/ */
public static Collector<CharSequence, StringJoiner> toStringJoiner(CharSequence delimiter) { public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) {
BinaryOperator<StringJoiner> merger = (sj, other) -> { return joining(delimiter, "", "");
if (other.length() > 0) }
sj.add(other.toString());
return sj; /**
}; * Returns a {@code Collector} that concatenates the input elements,
return new CollectorImpl<>(() -> new StringJoiner(delimiter), * separated by the specified delimiter, with the specified prefix and
(r, t) -> { r.add(t); return r; }, * suffix, in encounter order.
merger, CH_STRICT); *
* @param delimiter the delimiter to be used between each element
* @param prefix the sequence of characters to be used at the beginning
* of the joined result
* @param suffix the sequence of characters to be used at the end
* of the joined result
* @return A {@code Collector} which concatenates CharSequence elements,
* separated by the specified delimiter, in encounter order
*/
public static Collector<CharSequence, ?, String> joining(CharSequence delimiter,
CharSequence prefix,
CharSequence suffix) {
return new CollectorImpl<>(
() -> new StringJoiner(delimiter, prefix, suffix),
StringJoiner::add, StringJoiner::merge,
StringJoiner::toString, CH_NOID);
} }
/** /**
...@@ -348,12 +319,13 @@ public final class Collectors { ...@@ -348,12 +319,13 @@ public final class Collectors {
} }
/** /**
* Adapts a {@code Collector<U,R>} to a {@code Collector<T,R>} by applying * Adapts a {@code Collector} accepting elements of type {@code U} to one
* a mapping function to each input element before accumulation. * accepting elements of type {@code T} by applying a mapping function to
* each input element before accumulation.
* *
* @apiNote * @apiNote
* The {@code mapping()} collectors are most useful when used in a * The {@code mapping()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or * multi-level reduction, such as downstream of a {@code groupingBy} or
* {@code partitioningBy}. For example, given a stream of * {@code partitioningBy}. For example, given a stream of
* {@code Person}, to accumulate the set of last names in each city: * {@code Person}, to accumulate the set of last names in each city:
* <pre>{@code * <pre>{@code
...@@ -364,23 +336,27 @@ public final class Collectors { ...@@ -364,23 +336,27 @@ public final class Collectors {
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <U> type of elements accepted by downstream collector * @param <U> type of elements accepted by downstream collector
* @param <A> intermediate accumulation type of the downstream collector
* @param <R> result type of collector * @param <R> result type of collector
* @param mapper a function to be applied to the input elements * @param mapper a function to be applied to the input elements
* @param downstream a collector which will accept mapped values * @param downstream a collector which will accept mapped values
* @return a collector which applies the mapping function to the input * @return a collector which applies the mapping function to the input
* elements and provides the mapped results to the downstream collector * elements and provides the mapped results to the downstream collector
*/ */
public static <T, U, R> Collector<T, R> public static <T, U, A, R>
mapping(Function<? super T, ? extends U> mapper, Collector<? super U, R> downstream) { Collector<T, ?, R> mapping(Function<? super T, ? extends U> mapper,
BiFunction<R, ? super U, R> downstreamAccumulator = downstream.accumulator(); Collector<? super U, A, R> downstream) {
return new CollectorImpl<>(downstream.resultSupplier(), BiConsumer<A, ? super U> downstreamAccumulator = downstream.accumulator();
(r, t) -> downstreamAccumulator.apply(r, mapper.apply(t)), return new CollectorImpl<>(downstream.supplier(),
downstream.combiner(), downstream.characteristics()); (r, t) -> downstreamAccumulator.accept(r, mapper.apply(t)),
downstream.combiner(), downstream.finisher(),
downstream.characteristics());
} }
/** /**
* Returns a {@code Collector<T, Long>} that counts the number of input * Returns a {@code Collector} accepting elements of type {@code T} that
* elements. * counts the number of input elements. If no elements are present, the
* result is 0.
* *
* @implSpec * @implSpec
* This produces a result equivalent to: * This produces a result equivalent to:
...@@ -391,14 +367,14 @@ public final class Collectors { ...@@ -391,14 +367,14 @@ public final class Collectors {
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @return a {@code Collector} that counts the input elements * @return a {@code Collector} that counts the input elements
*/ */
public static <T> Collector<T, Long> public static <T> Collector<T, ?, Long>
counting() { counting() {
return reducing(0L, e -> 1L, Long::sum); return reducing(0L, e -> 1L, Long::sum);
} }
/** /**
* Returns a {@code Collector<T, T>} that produces the minimal element * Returns a {@code Collector} that produces the minimal element according
* according to a given {@code Comparator}. * to a given {@code Comparator}, described as an {@code Optional<T>}.
* *
* @implSpec * @implSpec
* This produces a result equivalent to: * This produces a result equivalent to:
...@@ -410,14 +386,14 @@ public final class Collectors { ...@@ -410,14 +386,14 @@ public final class Collectors {
* @param comparator a {@code Comparator} for comparing elements * @param comparator a {@code Comparator} for comparing elements
* @return a {@code Collector} that produces the minimal value * @return a {@code Collector} that produces the minimal value
*/ */
public static <T> Collector<T, T> public static <T> Collector<T, ?, Optional<T>>
minBy(Comparator<? super T> comparator) { minBy(Comparator<? super T> comparator) {
return reducing(BinaryOperator.minBy(comparator)); return reducing(BinaryOperator.minBy(comparator));
} }
/** /**
* Returns a {@code Collector<T, T>} that produces the maximal element * Returns a {@code Collector} that produces the maximal element according
* according to a given {@code Comparator}. * to a given {@code Comparator}, described as an {@code Optional<T>}.
* *
* @implSpec * @implSpec
* This produces a result equivalent to: * This produces a result equivalent to:
...@@ -429,39 +405,143 @@ public final class Collectors { ...@@ -429,39 +405,143 @@ public final class Collectors {
* @param comparator a {@code Comparator} for comparing elements * @param comparator a {@code Comparator} for comparing elements
* @return a {@code Collector} that produces the maximal value * @return a {@code Collector} that produces the maximal value
*/ */
public static <T> Collector<T, T> public static <T> Collector<T, ?, Optional<T>>
maxBy(Comparator<? super T> comparator) { maxBy(Comparator<? super T> comparator) {
return reducing(BinaryOperator.maxBy(comparator)); return reducing(BinaryOperator.maxBy(comparator));
} }
/** /**
* Returns a {@code Collector<T, Long>} that produces the sum of a * Returns a {@code Collector} that produces the sum of a integer-valued
* long-valued function applied to the input element. * function applied to the input elements. If no elements are present,
* the result is 0.
* *
* @implSpec * @param <T> the type of the input elements
* This produces a result equivalent to: * @param mapper a function extracting the property to be summed
* <pre>{@code * @return a {@code Collector} that produces the sum of a derived property
* reducing(0L, mapper, Long::sum) */
* }</pre> public static <T> Collector<T, ?, Integer>
summingInt(ToIntFunction<? super T> mapper) {
return new CollectorImpl<T, int[], Integer>(
() -> new int[1],
(a, t) -> { a[0] += mapper.applyAsInt(t); },
(a, b) -> { a[0] += b[0]; return a; },
a -> a[0], CH_NOID);
}
/**
* Returns a {@code Collector} that produces the sum of a long-valued
* function applied to the input elements. If no elements are present,
* the result is 0.
*
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
*/
public static <T> Collector<T, ?, Long>
summingLong(ToLongFunction<? super T> mapper) {
return new CollectorImpl<T, long[], Long>(
() -> new long[1],
(a, t) -> { a[0] += mapper.applyAsLong(t); },
(a, b) -> { a[0] += b[0]; return a; },
a -> a[0], CH_NOID);
}
/**
* Returns a {@code Collector} that produces the sum of a double-valued
* function applied to the input elements. If no elements are present,
* the result is 0.
*
* <p>The sum returned can vary depending upon the order in which
* values are recorded, due to accumulated rounding error in
* addition of values of differing magnitudes. Values sorted by increasing
* absolute magnitude tend to yield more accurate results. If any recorded
* value is a {@code NaN} or the sum is at any point a {@code NaN} then the
* sum will be {@code NaN}.
*
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
*/
public static <T> Collector<T, ?, Double>
summingDouble(ToDoubleFunction<? super T> mapper) {
return new CollectorImpl<T, double[], Double>(
() -> new double[1],
(a, t) -> { a[0] += mapper.applyAsDouble(t); },
(a, b) -> { a[0] += b[0]; return a; },
a -> a[0], CH_NOID);
}
/**
* Returns a {@code Collector} that produces the arithmetic mean of an integer-valued
* function applied to the input elements. If no elements are present,
* the result is 0.
*
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
*/
public static <T> Collector<T, ?, Double>
averagingInt(ToIntFunction<? super T> mapper) {
return new CollectorImpl<T, long[], Double>(
() -> new long[2],
(a, t) -> { a[0] += mapper.applyAsInt(t); a[1]++; },
(a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; },
a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID);
}
/**
* Returns a {@code Collector} that produces the arithmetic mean of a long-valued
* function applied to the input elements. If no elements are present,
* the result is 0.
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed * @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property * @return a {@code Collector} that produces the sum of a derived property
*/ */
public static <T> Collector<T, Long> public static <T> Collector<T, ?, Double>
sumBy(Function<? super T, Long> mapper) { averagingLong(ToLongFunction<? super T> mapper) {
return reducing(0L, mapper, Long::sum); return new CollectorImpl<T, long[], Double>(
() -> new long[2],
(a, t) -> { a[0] += mapper.applyAsLong(t); a[1]++; },
(a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; },
a -> (a[1] == 0) ? 0.0d : (double) a[0] / a[1], CH_NOID);
} }
/** /**
* Returns a {@code Collector<T,T>} which performs a reduction of its * Returns a {@code Collector} that produces the arithmetic mean of a double-valued
* input elements under a specified {@code BinaryOperator}. * function applied to the input elements. If no elements are present,
* the result is 0.
*
* <p>The average returned can vary depending upon the order in which
* values are recorded, due to accumulated rounding error in
* addition of values of differing magnitudes. Values sorted by increasing
* absolute magnitude tend to yield more accurate results. If any recorded
* value is a {@code NaN} or the sum is at any point a {@code NaN} then the
* average will be {@code NaN}.
*
* @param <T> the type of the input elements
* @param mapper a function extracting the property to be summed
* @return a {@code Collector} that produces the sum of a derived property
*/
public static <T> Collector<T, ?, Double>
averagingDouble(ToDoubleFunction<? super T> mapper) {
return new CollectorImpl<T, double[], Double>(
() -> new double[2],
(a, t) -> { a[0] += mapper.applyAsDouble(t); a[1]++; },
(a, b) -> { a[0] += b[0]; a[1] += b[1]; return a; },
a -> (a[1] == 0) ? 0.0d : a[0] / a[1], CH_NOID);
}
/**
* Returns a {@code Collector} which performs a reduction of its
* input elements under a specified {@code BinaryOperator} using the
* provided identity.
* *
* @apiNote * @apiNote
* The {@code reducing()} collectors are most useful when used in a * The {@code reducing()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or * multi-level reduction, downstream of {@code groupingBy} or
* {@code partitioningBy}. To perform a simple reduction on a stream, * {@code partitioningBy}. To perform a simple reduction on a stream,
* use {@link Stream#reduce(BinaryOperator)} instead. * use {@link Stream#reduce(Object, BinaryOperator)}} instead.
* *
* @param <T> element type for the input and output of the reduction * @param <T> element type for the input and output of the reduction
* @param identity the identity value for the reduction (also, the value * @param identity the identity value for the reduction (also, the value
...@@ -472,14 +552,25 @@ public final class Collectors { ...@@ -472,14 +552,25 @@ public final class Collectors {
* @see #reducing(BinaryOperator) * @see #reducing(BinaryOperator)
* @see #reducing(Object, Function, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator)
*/ */
public static <T> Collector<T, T> public static <T> Collector<T, ?, T>
reducing(T identity, BinaryOperator<T> op) { reducing(T identity, BinaryOperator<T> op) {
return new CollectorImpl<>(() -> identity, (r, t) -> (r == null ? t : op.apply(r, t)), op); return new CollectorImpl<>(
boxSupplier(identity),
(a, t) -> { a[0] = op.apply(a[0], t); },
(a, b) -> { a[0] = op.apply(a[0], b[0]); return a; },
a -> a[0],
CH_NOID);
}
@SuppressWarnings("unchecked")
private static <T> Supplier<T[]> boxSupplier(T identity) {
return () -> (T[]) new Object[] { identity };
} }
/** /**
* Returns a {@code Collector<T,T>} which performs a reduction of its * Returns a {@code Collector} which performs a reduction of its
* input elements under a specified {@code BinaryOperator}. * input elements under a specified {@code BinaryOperator}. The result
* is described as an {@code Optional<T>}.
* *
* @apiNote * @apiNote
* The {@code reducing()} collectors are most useful when used in a * The {@code reducing()} collectors are most useful when used in a
...@@ -491,15 +582,8 @@ public final class Collectors { ...@@ -491,15 +582,8 @@ public final class Collectors {
* person in each city: * person in each city:
* <pre>{@code * <pre>{@code
* Comparator<Person> byHeight = Comparator.comparing(Person::getHeight); * Comparator<Person> byHeight = Comparator.comparing(Person::getHeight);
* BinaryOperator<Person> tallerOf = BinaryOperator.greaterOf(byHeight);
* Map<City, Person> tallestByCity * Map<City, Person> tallestByCity
* = people.stream().collect(groupingBy(Person::getCity, reducing(tallerOf))); * = people.stream().collect(groupingBy(Person::getCity, reducing(BinaryOperator.maxBy(byHeight))));
* }</pre>
*
* @implSpec
* The default implementation is equivalent to:
* <pre>{@code
* reducing(null, op);
* }</pre> * }</pre>
* *
* @param <T> element type for the input and output of the reduction * @param <T> element type for the input and output of the reduction
...@@ -509,13 +593,32 @@ public final class Collectors { ...@@ -509,13 +593,32 @@ public final class Collectors {
* @see #reducing(Object, BinaryOperator) * @see #reducing(Object, BinaryOperator)
* @see #reducing(Object, Function, BinaryOperator) * @see #reducing(Object, Function, BinaryOperator)
*/ */
public static <T> Collector<T, T> public static <T> Collector<T, ?, Optional<T>>
reducing(BinaryOperator<T> op) { reducing(BinaryOperator<T> op) {
return reducing(null, op); class OptionalBox implements Consumer<T> {
T value = null;
boolean present = false;
@Override
public void accept(T t) {
if (present) {
value = op.apply(value, t);
}
else {
value = t;
present = true;
}
}
}
return new CollectorImpl<T, OptionalBox, Optional<T>>(
OptionalBox::new, OptionalBox::accept,
(a, b) -> { if (b.present) a.accept(b.value); return a; },
a -> Optional.ofNullable(a.value), CH_NOID);
} }
/** /**
* Returns a {@code Collector<T,U>} which performs a reduction of its * Returns a {@code Collector} which performs a reduction of its
* input elements under a specified mapping function and * input elements under a specified mapping function and
* {@code BinaryOperator}. This is a generalization of * {@code BinaryOperator}. This is a generalization of
* {@link #reducing(Object, BinaryOperator)} which allows a transformation * {@link #reducing(Object, BinaryOperator)} which allows a transformation
...@@ -524,17 +627,17 @@ public final class Collectors { ...@@ -524,17 +627,17 @@ public final class Collectors {
* @apiNote * @apiNote
* The {@code reducing()} collectors are most useful when used in a * The {@code reducing()} collectors are most useful when used in a
* multi-level reduction, downstream of {@code groupingBy} or * multi-level reduction, downstream of {@code groupingBy} or
* {@code partitioningBy}. To perform a simple reduction on a stream, * {@code partitioningBy}. To perform a simple map-reduce on a stream,
* use {@link Stream#reduce(BinaryOperator)} instead. * use {@link Stream#map(Function)} and {@link Stream#reduce(Object, BinaryOperator)}
* instead.
* *
* <p>For example, given a stream of {@code Person}, to calculate the longest * <p>For example, given a stream of {@code Person}, to calculate the longest
* last name of residents in each city: * last name of residents in each city:
* <pre>{@code * <pre>{@code
* Comparator<String> byLength = Comparator.comparing(String::length); * Comparator<String> byLength = Comparator.comparing(String::length);
* BinaryOperator<String> longerOf = BinaryOperator.greaterOf(byLength);
* Map<City, String> longestLastNameByCity * Map<City, String> longestLastNameByCity
* = people.stream().collect(groupingBy(Person::getCity, * = people.stream().collect(groupingBy(Person::getCity,
* reducing(Person::getLastName, longerOf))); * reducing(Person::getLastName, BinaryOperator.maxBy(byLength))));
* }</pre> * }</pre>
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
...@@ -549,18 +652,20 @@ public final class Collectors { ...@@ -549,18 +652,20 @@ public final class Collectors {
* @see #reducing(BinaryOperator) * @see #reducing(BinaryOperator)
*/ */
public static <T, U> public static <T, U>
Collector<T, U> reducing(U identity, Collector<T, ?, U> reducing(U identity,
Function<? super T, ? extends U> mapper, Function<? super T, ? extends U> mapper,
BinaryOperator<U> op) { BinaryOperator<U> op) {
return new CollectorImpl<>(() -> identity, return new CollectorImpl<>(
(r, t) -> (r == null ? mapper.apply(t) : op.apply(r, mapper.apply(t))), boxSupplier(identity),
op); (a, t) -> { a[0] = op.apply(a[0], mapper.apply(t)); },
(a, b) -> { a[0] = op.apply(a[0], b[0]); return a; },
a -> a[0], CH_NOID);
} }
/** /**
* Returns a {@code Collector} implementing a "group by" operation on * Returns a {@code Collector} implementing a "group by" operation on
* input elements of type {@code T}, grouping elements according to a * input elements of type {@code T}, grouping elements according to a
* classification function. * classification function, and returning the results in a {@code Map}.
* *
* <p>The classification function maps elements to some key type {@code K}. * <p>The classification function maps elements to some key type {@code K}.
* The collector produces a {@code Map<K, List<T>>} whose keys are the * The collector produces a {@code Map<K, List<T>>} whose keys are the
...@@ -586,9 +691,9 @@ public final class Collectors { ...@@ -586,9 +691,9 @@ public final class Collectors {
* @see #groupingBy(Function, Supplier, Collector) * @see #groupingBy(Function, Supplier, Collector)
* @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function)
*/ */
public static <T, K> public static <T, K> Collector<T, ?, Map<K, List<T>>>
Collector<T, Map<K, List<T>>> groupingBy(Function<? super T, ? extends K> classifier) { groupingBy(Function<? super T, ? extends K> classifier) {
return groupingBy(classifier, HashMap::new, toList()); return groupingBy(classifier, toList());
} }
/** /**
...@@ -615,6 +720,7 @@ public final class Collectors { ...@@ -615,6 +720,7 @@ public final class Collectors {
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <K> the type of the keys * @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction * @param <D> the result type of the downstream reduction
* @param classifier a classifier function mapping input elements to keys * @param classifier a classifier function mapping input elements to keys
* @param downstream a {@code Collector} implementing the downstream reduction * @param downstream a {@code Collector} implementing the downstream reduction
...@@ -624,9 +730,9 @@ public final class Collectors { ...@@ -624,9 +730,9 @@ public final class Collectors {
* @see #groupingBy(Function, Supplier, Collector) * @see #groupingBy(Function, Supplier, Collector)
* @see #groupingByConcurrent(Function, Collector) * @see #groupingByConcurrent(Function, Collector)
*/ */
public static <T, K, D> public static <T, K, A, D>
Collector<T, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier,
Collector<? super T, D> downstream) { Collector<? super T, A, D> downstream) {
return groupingBy(classifier, HashMap::new, downstream); return groupingBy(classifier, HashMap::new, downstream);
} }
...@@ -653,6 +759,7 @@ public final class Collectors { ...@@ -653,6 +759,7 @@ public final class Collectors {
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <K> the type of the keys * @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction * @param <D> the result type of the downstream reduction
* @param <M> the type of the resulting {@code Map} * @param <M> the type of the resulting {@code Map}
* @param classifier a classifier function mapping input elements to keys * @param classifier a classifier function mapping input elements to keys
...@@ -665,25 +772,39 @@ public final class Collectors { ...@@ -665,25 +772,39 @@ public final class Collectors {
* @see #groupingBy(Function) * @see #groupingBy(Function)
* @see #groupingByConcurrent(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Supplier, Collector)
*/ */
public static <T, K, D, M extends Map<K, D>> public static <T, K, D, A, M extends Map<K, D>>
Collector<T, M> groupingBy(Function<? super T, ? extends K> classifier, Collector<T, ?, M> groupingBy(Function<? super T, ? extends K> classifier,
Supplier<M> mapFactory, Supplier<M> mapFactory,
Collector<? super T, D> downstream) { Collector<? super T, A, D> downstream) {
Supplier<D> downstreamSupplier = downstream.resultSupplier(); Supplier<A> downstreamSupplier = downstream.supplier();
BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator();
BiFunction<M, T, M> accumulator = (m, t) -> { BiConsumer<Map<K, A>, T> accumulator = (m, t) -> {
K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key");
D oldContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); A container = m.computeIfAbsent(key, k -> downstreamSupplier.get());
D newContainer = downstreamAccumulator.apply(oldContainer, t); downstreamAccumulator.accept(container, t);
if (newContainer != oldContainer)
m.put(key, newContainer);
return m;
}; };
return new CollectorImpl<>(mapFactory, accumulator, mapMerger(downstream.combiner()), CH_STRICT); BinaryOperator<Map<K, A>> merger = Collectors.<K, A, Map<K, A>>mapMerger(downstream.combiner());
@SuppressWarnings("unchecked")
Supplier<Map<K, A>> mangledFactory = (Supplier<Map<K, A>>) mapFactory;
if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) {
return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_ID);
}
else {
@SuppressWarnings("unchecked")
Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher();
Function<Map<K, A>, M> finisher = intermediate -> {
intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v));
@SuppressWarnings("unchecked")
M castResult = (M) intermediate;
return castResult;
};
return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_NOID);
}
} }
/** /**
* Returns a {@code Collector} implementing a concurrent "group by" * Returns a concurrent {@code Collector} implementing a "group by"
* operation on input elements of type {@code T}, grouping elements * operation on input elements of type {@code T}, grouping elements
* according to a classification function. * according to a classification function.
* *
...@@ -716,12 +837,13 @@ public final class Collectors { ...@@ -716,12 +837,13 @@ public final class Collectors {
* @see #groupingByConcurrent(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Supplier, Collector)
*/ */
public static <T, K> public static <T, K>
Collector<T, ConcurrentMap<K, List<T>>> groupingByConcurrent(Function<? super T, ? extends K> classifier) { Collector<T, ?, ConcurrentMap<K, List<T>>>
groupingByConcurrent(Function<? super T, ? extends K> classifier) {
return groupingByConcurrent(classifier, ConcurrentHashMap::new, toList()); return groupingByConcurrent(classifier, ConcurrentHashMap::new, toList());
} }
/** /**
* Returns a {@code Collector} implementing a concurrent cascaded "group by" * Returns a concurrent {@code Collector} implementing a cascaded "group by"
* operation on input elements of type {@code T}, grouping elements * operation on input elements of type {@code T}, grouping elements
* according to a classification function, and then performing a reduction * according to a classification function, and then performing a reduction
* operation on the values associated with a given key using the specified * operation on the values associated with a given key using the specified
...@@ -739,12 +861,13 @@ public final class Collectors { ...@@ -739,12 +861,13 @@ public final class Collectors {
* where the city names are sorted: * where the city names are sorted:
* <pre>{@code * <pre>{@code
* ConcurrentMap<City, Set<String>> namesByCity * ConcurrentMap<City, Set<String>> namesByCity
* = people.stream().collect(groupingByConcurrent(Person::getCity, TreeMap::new, * = people.stream().collect(groupingByConcurrent(Person::getCity, ConcurrentSkipListMap::new,
* mapping(Person::getLastName, toSet()))); * mapping(Person::getLastName, toSet())));
* }</pre> * }</pre>
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <K> the type of the keys * @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction * @param <D> the result type of the downstream reduction
* @param classifier a classifier function mapping input elements to keys * @param classifier a classifier function mapping input elements to keys
* @param downstream a {@code Collector} implementing the downstream reduction * @param downstream a {@code Collector} implementing the downstream reduction
...@@ -754,9 +877,9 @@ public final class Collectors { ...@@ -754,9 +877,9 @@ public final class Collectors {
* @see #groupingByConcurrent(Function) * @see #groupingByConcurrent(Function)
* @see #groupingByConcurrent(Function, Supplier, Collector) * @see #groupingByConcurrent(Function, Supplier, Collector)
*/ */
public static <T, K, D> public static <T, K, A, D>
Collector<T, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier, Collector<T, ?, ConcurrentMap<K, D>> groupingByConcurrent(Function<? super T, ? extends K> classifier,
Collector<? super T, D> downstream) { Collector<? super T, A, D> downstream) {
return groupingByConcurrent(classifier, ConcurrentHashMap::new, downstream); return groupingByConcurrent(classifier, ConcurrentHashMap::new, downstream);
} }
...@@ -787,6 +910,7 @@ public final class Collectors { ...@@ -787,6 +910,7 @@ public final class Collectors {
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <K> the type of the keys * @param <K> the type of the keys
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction * @param <D> the result type of the downstream reduction
* @param <M> the type of the resulting {@code ConcurrentMap} * @param <M> the type of the resulting {@code ConcurrentMap}
* @param classifier a classifier function mapping input elements to keys * @param classifier a classifier function mapping input elements to keys
...@@ -799,51 +923,46 @@ public final class Collectors { ...@@ -799,51 +923,46 @@ public final class Collectors {
* @see #groupingByConcurrent(Function, Collector) * @see #groupingByConcurrent(Function, Collector)
* @see #groupingBy(Function, Supplier, Collector) * @see #groupingBy(Function, Supplier, Collector)
*/ */
public static <T, K, D, M extends ConcurrentMap<K, D>> public static <T, K, A, D, M extends ConcurrentMap<K, D>>
Collector<T, M> groupingByConcurrent(Function<? super T, ? extends K> classifier, Collector<T, ?, M> groupingByConcurrent(Function<? super T, ? extends K> classifier,
Supplier<M> mapFactory, Supplier<M> mapFactory,
Collector<? super T, D> downstream) { Collector<? super T, A, D> downstream) {
Supplier<D> downstreamSupplier = downstream.resultSupplier(); Supplier<A> downstreamSupplier = downstream.supplier();
BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); BiConsumer<A, ? super T> downstreamAccumulator = downstream.accumulator();
BinaryOperator<M> combiner = mapMerger(downstream.combiner()); BinaryOperator<ConcurrentMap<K, A>> merger = Collectors.<K, A, ConcurrentMap<K, A>>mapMerger(downstream.combiner());
@SuppressWarnings("unchecked")
Supplier<ConcurrentMap<K, A>> mangledFactory = (Supplier<ConcurrentMap<K, A>>) mapFactory;
BiConsumer<ConcurrentMap<K, A>, T> accumulator;
if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) { if (downstream.characteristics().contains(Collector.Characteristics.CONCURRENT)) {
BiFunction<M, T, M> accumulator = (m, t) -> { accumulator = (m, t) -> {
K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key");
downstreamAccumulator.apply(m.computeIfAbsent(key, k -> downstreamSupplier.get()), t); A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get());
return m; downstreamAccumulator.accept(resultContainer, t);
}; };
return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); }
} else if (downstream.characteristics().contains(Collector.Characteristics.STRICTLY_MUTATIVE)) { else {
BiFunction<M, T, M> accumulator = (m, t) -> { accumulator = (m, t) -> {
K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key");
D resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get()); A resultContainer = m.computeIfAbsent(key, k -> downstreamSupplier.get());
synchronized (resultContainer) { synchronized (resultContainer) {
downstreamAccumulator.apply(resultContainer, t); downstreamAccumulator.accept(resultContainer, t);
} }
return m;
}; };
return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); }
} else {
BiFunction<M, T, M> accumulator = (m, t) -> { if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) {
K key = Objects.requireNonNull(classifier.apply(t), "element cannot be mapped to a null key"); return new CollectorImpl<>(mangledFactory, accumulator, merger, CH_CONCURRENT_ID);
do { }
D oldResult = m.computeIfAbsent(key, k -> downstreamSupplier.get()); else {
if (oldResult == null) { @SuppressWarnings("unchecked")
if (m.putIfAbsent(key, downstreamAccumulator.apply(null, t)) == null) Function<A, A> downstreamFinisher = (Function<A, A>) downstream.finisher();
return m; Function<ConcurrentMap<K, A>, M> finisher = intermediate -> {
} else { intermediate.replaceAll((k, v) -> downstreamFinisher.apply(v));
synchronized (oldResult) { @SuppressWarnings("unchecked")
if (m.get(key) != oldResult) M castResult = (M) intermediate;
continue; return castResult;
D newResult = downstreamAccumulator.apply(oldResult, t);
if (oldResult != newResult)
m.put(key, newResult);
return m;
}
}
} while (true);
}; };
return new CollectorImpl<>(mapFactory, accumulator, combiner, CH_CONCURRENT); return new CollectorImpl<>(mangledFactory, accumulator, merger, finisher, CH_CONCURRENT_NOID);
} }
} }
...@@ -862,7 +981,7 @@ public final class Collectors { ...@@ -862,7 +981,7 @@ public final class Collectors {
* @see #partitioningBy(Predicate, Collector) * @see #partitioningBy(Predicate, Collector)
*/ */
public static <T> public static <T>
Collector<T, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) { Collector<T, ?, Map<Boolean, List<T>>> partitioningBy(Predicate<? super T> predicate) {
return partitioningBy(predicate, toList()); return partitioningBy(predicate, toList());
} }
...@@ -877,6 +996,7 @@ public final class Collectors { ...@@ -877,6 +996,7 @@ public final class Collectors {
* serializability, or thread-safety of the {@code Map} returned. * serializability, or thread-safety of the {@code Map} returned.
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <A> the intermediate accumulation type of the downstream collector
* @param <D> the result type of the downstream reduction * @param <D> the result type of the downstream reduction
* @param predicate a predicate used for classifying input elements * @param predicate a predicate used for classifying input elements
* @param downstream a {@code Collector} implementing the downstream * @param downstream a {@code Collector} implementing the downstream
...@@ -886,52 +1006,43 @@ public final class Collectors { ...@@ -886,52 +1006,43 @@ public final class Collectors {
* *
* @see #partitioningBy(Predicate) * @see #partitioningBy(Predicate)
*/ */
public static <T, D> public static <T, D, A>
Collector<T, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate, Collector<T, ?, Map<Boolean, D>> partitioningBy(Predicate<? super T> predicate,
Collector<? super T, D> downstream) { Collector<? super T, A, D> downstream) {
BiFunction<D, ? super T, D> downstreamAccumulator = downstream.accumulator(); @SuppressWarnings("unchecked")
BiFunction<Map<Boolean, D>, T, Map<Boolean, D>> accumulator = (result, t) -> { BiConsumer<D, ? super T> downstreamAccumulator = (BiConsumer<D, ? super T>) downstream.accumulator();
BiConsumer<Map<Boolean, A>, T> accumulator = (result, t) -> {
Partition<D> asPartition = ((Partition<D>) result); Partition<D> asPartition = ((Partition<D>) result);
if (predicate.test(t)) { downstreamAccumulator.accept(predicate.test(t) ? asPartition.forTrue : asPartition.forFalse, t);
D newResult = downstreamAccumulator.apply(asPartition.forTrue, t);
if (newResult != asPartition.forTrue)
asPartition.forTrue = newResult;
} else {
D newResult = downstreamAccumulator.apply(asPartition.forFalse, t);
if (newResult != asPartition.forFalse)
asPartition.forFalse = newResult;
}
return result;
}; };
return new CollectorImpl<>(() -> new Partition<>(downstream.resultSupplier().get(), BinaryOperator<A> op = downstream.combiner();
downstream.resultSupplier().get()), BinaryOperator<Map<Boolean, A>> merger = (m1, m2) -> {
accumulator, partitionMerger(downstream.combiner()), CH_STRICT); Partition<A> left = (Partition<A>) m1;
Partition<A> right = (Partition<A>) m2;
return new Partition<>(op.apply(left.forTrue, right.forTrue),
op.apply(left.forFalse, right.forFalse));
};
Supplier<Map<Boolean, A>> supplier = () -> new Partition<>(downstream.supplier().get(),
downstream.supplier().get());
if (downstream.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) {
return new CollectorImpl<>(supplier, accumulator, merger, CH_ID);
} }
else {
/** Function<Map<Boolean, A>, Map<Boolean, D>> finisher = (Map<Boolean, A> par) -> {
* Merge function for two partitions, given a merge function for the Partition<A> asAPartition = (Partition<A>) par;
* elements. return new Partition<>(downstream.finisher().apply(asAPartition.forTrue),
*/ downstream.finisher().apply(asAPartition.forFalse));
private static <D> BinaryOperator<Map<Boolean, D>> partitionMerger(BinaryOperator<D> op) {
return (m1, m2) -> {
Partition<D> left = (Partition<D>) m1;
Partition<D> right = (Partition<D>) m2;
if (left.forFalse == null)
left.forFalse = right.forFalse;
else if (right.forFalse != null)
left.forFalse = op.apply(left.forFalse, right.forFalse);
if (left.forTrue == null)
left.forTrue = right.forTrue;
else if (right.forTrue != null)
left.forTrue = op.apply(left.forTrue, right.forTrue);
return left;
}; };
return new CollectorImpl<>(supplier, accumulator, merger, finisher, CH_NOID);
}
} }
/** /**
* Accumulate elements into a {@code Map} whose keys and values are the * Returns a {@code Collector} that accumulate elements into a
* result of applying mapping functions to the input elements. * {@code Map} whose keys and values are the result of applying the provided
* If the mapped keys contains duplicates (according to * mapping functions to the input elements.
*
* <p>If the mapped keys contains duplicates (according to
* {@link Object#equals(Object)}), an {@code IllegalStateException} is * {@link Object#equals(Object)}), an {@code IllegalStateException} is
* thrown when the collection operation is performed. If the mapped keys * thrown when the collection operation is performed. If the mapped keys
* may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)} * may have duplicates, use {@link #toMap(Function, Function, BinaryOperator)}
...@@ -970,24 +1081,26 @@ public final class Collectors { ...@@ -970,24 +1081,26 @@ public final class Collectors {
* @see #toConcurrentMap(Function, Function) * @see #toConcurrentMap(Function, Function)
*/ */
public static <T, K, U> public static <T, K, U>
Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper) { Function<? super T, ? extends U> valueMapper) {
return toMap(keyMapper, valueMapper, throwingMerger(), HashMap::new); return toMap(keyMapper, valueMapper, throwingMerger(), HashMap::new);
} }
/** /**
* Accumulate elements into a {@code Map} whose keys and values are the * Returns a {@code Collector} that accumulate elements into a
* result of applying mapping functions to the input elements. If the mapped * {@code Map} whose keys and values are the result of applying the provided
* mapping functions to the input elements.
*
* <p>If the mapped
* keys contains duplicates (according to {@link Object#equals(Object)}), * keys contains duplicates (according to {@link Object#equals(Object)}),
* the value mapping function is applied to each equal element, and the * the value mapping function is applied to each equal element, and the
* results are merged using the provided merging function. * results are merged using the provided merging function.
* *
* @apiNote * @apiNote
* There are multiple ways to deal with collisions between multiple elements * There are multiple ways to deal with collisions between multiple elements
* mapping to the same key. There are some predefined merging functions, * mapping to the same key. The other forms of {@code toMap} simply use
* such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and * a merge function that throws unconditionally, but you can easily write
* {@link #lastWinsMerger()}, that implement common policies, or you can * more flexible merge policies. For example, if you have a stream
* implement custom policies easily. For example, if you have a stream
* of {@code Person}, and you want to produce a "phone book" mapping name to * of {@code Person}, and you want to produce a "phone book" mapping name to
* address, but it is possible that two persons have the same name, you can * address, but it is possible that two persons have the same name, you can
* do as follows to gracefully deals with these collisions, and produce a * do as follows to gracefully deals with these collisions, and produce a
...@@ -1018,15 +1131,18 @@ public final class Collectors { ...@@ -1018,15 +1131,18 @@ public final class Collectors {
* @see #toConcurrentMap(Function, Function, BinaryOperator) * @see #toConcurrentMap(Function, Function, BinaryOperator)
*/ */
public static <T, K, U> public static <T, K, U>
Collector<T, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, Map<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper, Function<? super T, ? extends U> valueMapper,
BinaryOperator<U> mergeFunction) { BinaryOperator<U> mergeFunction) {
return toMap(keyMapper, valueMapper, mergeFunction, HashMap::new); return toMap(keyMapper, valueMapper, mergeFunction, HashMap::new);
} }
/** /**
* Accumulate elements into a {@code Map} whose keys and values are the * Returns a {@code Collector} that accumulate elements into a
* result of applying mapping functions to the input elements. If the mapped * {@code Map} whose keys and values are the result of applying the provided
* mapping functions to the input elements.
*
* <p>If the mapped
* keys contains duplicates (according to {@link Object#equals(Object)}), * keys contains duplicates (according to {@link Object#equals(Object)}),
* the value mapping function is applied to each equal element, and the * the value mapping function is applied to each equal element, and the
* results are merged using the provided merging function. The {@code Map} * results are merged using the provided merging function. The {@code Map}
...@@ -1054,22 +1170,22 @@ public final class Collectors { ...@@ -1054,22 +1170,22 @@ public final class Collectors {
* @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier) * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
*/ */
public static <T, K, U, M extends Map<K, U>> public static <T, K, U, M extends Map<K, U>>
Collector<T, M> toMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, M> toMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper, Function<? super T, ? extends U> valueMapper,
BinaryOperator<U> mergeFunction, BinaryOperator<U> mergeFunction,
Supplier<M> mapSupplier) { Supplier<M> mapSupplier) {
BiFunction<M, T, M> accumulator BiConsumer<M, T> accumulator
= (map, element) -> { = (map, element) -> map.merge(keyMapper.apply(element),
map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction); valueMapper.apply(element), mergeFunction);
return map; return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_ID);
};
return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_STRICT);
} }
/** /**
* Accumulate elements into a {@code ConcurrentMap} whose keys and values * Returns a {@code Collector} that accumulate elements into a
* are the result of applying mapping functions to the input elements. * {@code ConcurrentMap} whose keys and values are the result of applying
* If the mapped keys contains duplicates (according to * the provided mapping functions to the input elements.
*
* <p>If the mapped keys contains duplicates (according to
* {@link Object#equals(Object)}), an {@code IllegalStateException} is * {@link Object#equals(Object)}), an {@code IllegalStateException} is
* thrown when the collection operation is performed. If the mapped keys * thrown when the collection operation is performed. If the mapped keys
* may have duplicates, use * may have duplicates, use
...@@ -1112,24 +1228,25 @@ public final class Collectors { ...@@ -1112,24 +1228,25 @@ public final class Collectors {
* @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier) * @see #toConcurrentMap(Function, Function, BinaryOperator, Supplier)
*/ */
public static <T, K, U> public static <T, K, U>
Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper) { Function<? super T, ? extends U> valueMapper) {
return toConcurrentMap(keyMapper, valueMapper, throwingMerger(), ConcurrentHashMap::new); return toConcurrentMap(keyMapper, valueMapper, throwingMerger(), ConcurrentHashMap::new);
} }
/** /**
* Accumulate elements into a {@code ConcurrentMap} whose keys and values * Returns a {@code Collector} that accumulate elements into a
* are the result of applying mapping functions to the input elements. If * {@code ConcurrentMap} whose keys and values are the result of applying
* the mapped keys contains duplicates (according to {@link Object#equals(Object)}), * the provided mapping functions to the input elements.
*
* <p>If the mapped keys contains duplicates (according to {@link Object#equals(Object)}),
* the value mapping function is applied to each equal element, and the * the value mapping function is applied to each equal element, and the
* results are merged using the provided merging function. * results are merged using the provided merging function.
* *
* @apiNote * @apiNote
* There are multiple ways to deal with collisions between multiple elements * There are multiple ways to deal with collisions between multiple elements
* mapping to the same key. There are some predefined merging functions, * mapping to the same key. The other forms of {@code toConcurrentMap} simply use
* such as {@link #throwingMerger()}, {@link #firstWinsMerger()}, and * a merge function that throws unconditionally, but you can easily write
* {@link #lastWinsMerger()}, that implement common policies, or you can * more flexible merge policies. For example, if you have a stream
* implement custom policies easily. For example, if you have a stream
* of {@code Person}, and you want to produce a "phone book" mapping name to * of {@code Person}, and you want to produce a "phone book" mapping name to
* address, but it is possible that two persons have the same name, you can * address, but it is possible that two persons have the same name, you can
* do as follows to gracefully deals with these collisions, and produce a * do as follows to gracefully deals with these collisions, and produce a
...@@ -1163,16 +1280,19 @@ public final class Collectors { ...@@ -1163,16 +1280,19 @@ public final class Collectors {
* @see #toMap(Function, Function, BinaryOperator) * @see #toMap(Function, Function, BinaryOperator)
*/ */
public static <T, K, U> public static <T, K, U>
Collector<T, ConcurrentMap<K,U>> toConcurrentMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, ConcurrentMap<K,U>>
toConcurrentMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper, Function<? super T, ? extends U> valueMapper,
BinaryOperator<U> mergeFunction) { BinaryOperator<U> mergeFunction) {
return toConcurrentMap(keyMapper, valueMapper, mergeFunction, ConcurrentHashMap::new); return toConcurrentMap(keyMapper, valueMapper, mergeFunction, ConcurrentHashMap::new);
} }
/** /**
* Accumulate elements into a {@code ConcurrentMap} whose keys and values * Returns a {@code Collector} that accumulate elements into a
* are the result of applying mapping functions to the input elements. If * {@code ConcurrentMap} whose keys and values are the result of applying
* the mapped keys contains duplicates (according to {@link Object#equals(Object)}), * the provided mapping functions to the input elements.
*
* <p>If the mapped keys contains duplicates (according to {@link Object#equals(Object)}),
* the value mapping function is applied to each equal element, and the * the value mapping function is applied to each equal element, and the
* results are merged using the provided merging function. The * results are merged using the provided merging function. The
* {@code ConcurrentMap} is created by a provided supplier function. * {@code ConcurrentMap} is created by a provided supplier function.
...@@ -1202,15 +1322,14 @@ public final class Collectors { ...@@ -1202,15 +1322,14 @@ public final class Collectors {
* @see #toMap(Function, Function, BinaryOperator, Supplier) * @see #toMap(Function, Function, BinaryOperator, Supplier)
*/ */
public static <T, K, U, M extends ConcurrentMap<K, U>> public static <T, K, U, M extends ConcurrentMap<K, U>>
Collector<T, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper, Collector<T, ?, M> toConcurrentMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper, Function<? super T, ? extends U> valueMapper,
BinaryOperator<U> mergeFunction, BinaryOperator<U> mergeFunction,
Supplier<M> mapSupplier) { Supplier<M> mapSupplier) {
BiFunction<M, T, M> accumulator = (map, element) -> { BiConsumer<M, T> accumulator
map.merge(keyMapper.apply(element), valueMapper.apply(element), mergeFunction); = (map, element) -> map.merge(keyMapper.apply(element),
return map; valueMapper.apply(element), mergeFunction);
}; return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT_ID);
return new CollectorImpl<>(mapSupplier, accumulator, mapMerger(mergeFunction), CH_CONCURRENT);
} }
/** /**
...@@ -1222,14 +1341,15 @@ public final class Collectors { ...@@ -1222,14 +1341,15 @@ public final class Collectors {
* @param mapper a mapping function to apply to each element * @param mapper a mapping function to apply to each element
* @return a {@code Collector} implementing the summary-statistics reduction * @return a {@code Collector} implementing the summary-statistics reduction
* *
* @see #toDoubleSummaryStatistics(ToDoubleFunction) * @see #summarizingDouble(ToDoubleFunction)
* @see #toLongSummaryStatistics(ToLongFunction) * @see #summarizingLong(ToLongFunction)
*/ */
public static <T> public static <T>
Collector<T, IntSummaryStatistics> toIntSummaryStatistics(ToIntFunction<? super T> mapper) { Collector<T, ?, IntSummaryStatistics> summarizingInt(ToIntFunction<? super T> mapper) {
return new CollectorImpl<>(IntSummaryStatistics::new, return new CollectorImpl<T, IntSummaryStatistics, IntSummaryStatistics>(
(r, t) -> { r.accept(mapper.applyAsInt(t)); return r; }, IntSummaryStatistics::new,
(l, r) -> { l.combine(r); return l; }, CH_STRICT); (r, t) -> r.accept(mapper.applyAsInt(t)),
(l, r) -> { l.combine(r); return l; }, CH_ID);
} }
/** /**
...@@ -1241,14 +1361,15 @@ public final class Collectors { ...@@ -1241,14 +1361,15 @@ public final class Collectors {
* @param mapper the mapping function to apply to each element * @param mapper the mapping function to apply to each element
* @return a {@code Collector} implementing the summary-statistics reduction * @return a {@code Collector} implementing the summary-statistics reduction
* *
* @see #toDoubleSummaryStatistics(ToDoubleFunction) * @see #summarizingDouble(ToDoubleFunction)
* @see #toIntSummaryStatistics(ToIntFunction) * @see #summarizingInt(ToIntFunction)
*/ */
public static <T> public static <T>
Collector<T, LongSummaryStatistics> toLongSummaryStatistics(ToLongFunction<? super T> mapper) { Collector<T, ?, LongSummaryStatistics> summarizingLong(ToLongFunction<? super T> mapper) {
return new CollectorImpl<>(LongSummaryStatistics::new, return new CollectorImpl<T, LongSummaryStatistics, LongSummaryStatistics>(
(r, t) -> { r.accept(mapper.applyAsLong(t)); return r; }, LongSummaryStatistics::new,
(l, r) -> { l.combine(r); return l; }, CH_STRICT); (r, t) -> r.accept(mapper.applyAsLong(t)),
(l, r) -> { l.combine(r); return l; }, CH_ID);
} }
/** /**
...@@ -1260,14 +1381,15 @@ public final class Collectors { ...@@ -1260,14 +1381,15 @@ public final class Collectors {
* @param mapper a mapping function to apply to each element * @param mapper a mapping function to apply to each element
* @return a {@code Collector} implementing the summary-statistics reduction * @return a {@code Collector} implementing the summary-statistics reduction
* *
* @see #toLongSummaryStatistics(ToLongFunction) * @see #summarizingLong(ToLongFunction)
* @see #toIntSummaryStatistics(ToIntFunction) * @see #summarizingInt(ToIntFunction)
*/ */
public static <T> public static <T>
Collector<T, DoubleSummaryStatistics> toDoubleSummaryStatistics(ToDoubleFunction<? super T> mapper) { Collector<T, ?, DoubleSummaryStatistics> summarizingDouble(ToDoubleFunction<? super T> mapper) {
return new CollectorImpl<>(DoubleSummaryStatistics::new, return new CollectorImpl<T, DoubleSummaryStatistics, DoubleSummaryStatistics>(
(r, t) -> { r.accept(mapper.applyAsDouble(t)); return r; }, DoubleSummaryStatistics::new,
(l, r) -> { l.combine(r); return l; }, CH_STRICT); (r, t) -> r.accept(mapper.applyAsDouble(t)),
(l, r) -> { l.combine(r); return l; }, CH_ID);
} }
/** /**
...@@ -1276,8 +1398,8 @@ public final class Collectors { ...@@ -1276,8 +1398,8 @@ public final class Collectors {
private static final class Partition<T> private static final class Partition<T>
extends AbstractMap<Boolean, T> extends AbstractMap<Boolean, T>
implements Map<Boolean, T> { implements Map<Boolean, T> {
T forTrue; final T forTrue;
T forFalse; final T forFalse;
Partition(T forTrue, T forFalse) { Partition(T forTrue, T forFalse) {
this.forTrue = forTrue; this.forTrue = forTrue;
...@@ -1289,24 +1411,9 @@ public final class Collectors { ...@@ -1289,24 +1411,9 @@ public final class Collectors {
return new AbstractSet<Map.Entry<Boolean, T>>() { return new AbstractSet<Map.Entry<Boolean, T>>() {
@Override @Override
public Iterator<Map.Entry<Boolean, T>> iterator() { public Iterator<Map.Entry<Boolean, T>> iterator() {
Map.Entry<Boolean, T> falseEntry = new SimpleImmutableEntry<>(false, forFalse);
return new Iterator<Map.Entry<Boolean, T>>() { Map.Entry<Boolean, T> trueEntry = new SimpleImmutableEntry<>(true, forTrue);
int state = 0; return Arrays.asList(falseEntry, trueEntry).iterator();
@Override
public boolean hasNext() {
return state < 2;
}
@Override
public Map.Entry<Boolean, T> next() {
if (state >= 2)
throw new NoSuchElementException();
return (state++ == 0)
? new SimpleImmutableEntry<>(false, forFalse)
: new SimpleImmutableEntry<>(true, forTrue);
}
};
} }
@Override @Override
......
...@@ -209,7 +209,7 @@ public class DelegatingStream<T> implements Stream<T> { ...@@ -209,7 +209,7 @@ public class DelegatingStream<T> implements Stream<T> {
} }
@Override @Override
public <R> R collect(Collector<? super T, R> collector) { public <R, A> R collect(Collector<? super T, A, ? extends R> collector) {
return delegate.collect(collector); return delegate.collect(collector);
} }
......
...@@ -527,7 +527,7 @@ public interface DoubleStream extends BaseStream<Double, DoubleStream> { ...@@ -527,7 +527,7 @@ public interface DoubleStream extends BaseStream<Double, DoubleStream> {
long count(); long count();
/** /**
* Returns an {@code OptionalDouble} describing the average of elements of * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of
* this stream, or an empty optional if this stream is empty. The average * this stream, or an empty optional if this stream is empty. The average
* returned can vary depending upon the order in which elements are * returned can vary depending upon the order in which elements are
* encountered. This is due to accumulated rounding error in addition of * encountered. This is due to accumulated rounding error in addition of
......
...@@ -517,7 +517,7 @@ public interface IntStream extends BaseStream<Integer, IntStream> { ...@@ -517,7 +517,7 @@ public interface IntStream extends BaseStream<Integer, IntStream> {
long count(); long count();
/** /**
* Returns an {@code OptionalDouble} describing the average of elements of * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of
* this stream, or an empty optional if this stream is empty. This is a * this stream, or an empty optional if this stream is empty. This is a
* special case of a * special case of a
* <a href="package-summary.html#MutableReduction">reduction</a>. * <a href="package-summary.html#MutableReduction">reduction</a>.
......
...@@ -517,7 +517,7 @@ public interface LongStream extends BaseStream<Long, LongStream> { ...@@ -517,7 +517,7 @@ public interface LongStream extends BaseStream<Long, LongStream> {
long count(); long count();
/** /**
* Returns an {@code OptionalDouble} describing the average of elements of * Returns an {@code OptionalDouble} describing the arithmetic mean of elements of
* this stream, or an empty optional if this stream is empty. This is a * this stream, or an empty optional if this stream is empty. This is a
* special case of a * special case of a
* <a href="package-summary.html#MutableReduction">reduction</a>. * <a href="package-summary.html#MutableReduction">reduction</a>.
......
...@@ -148,17 +148,17 @@ final class ReduceOps { ...@@ -148,17 +148,17 @@ final class ReduceOps {
* reference values. * reference values.
* *
* @param <T> the type of the input elements * @param <T> the type of the input elements
* @param <R> the type of the result * @param <I> the type of the intermediate reduction result
* @param collector a {@code Collector} defining the reduction * @param collector a {@code Collector} defining the reduction
* @return a {@code ReduceOp} implementing the reduction * @return a {@code ReduceOp} implementing the reduction
*/ */
public static <T,R> TerminalOp<T, R> public static <T, I> TerminalOp<T, I>
makeRef(Collector<? super T,R> collector) { makeRef(Collector<? super T, I, ?> collector) {
Supplier<R> supplier = Objects.requireNonNull(collector).resultSupplier(); Supplier<I> supplier = Objects.requireNonNull(collector).supplier();
BiFunction<R, ? super T, R> accumulator = collector.accumulator(); BiConsumer<I, ? super T> accumulator = collector.accumulator();
BinaryOperator<R> combiner = collector.combiner(); BinaryOperator<I> combiner = collector.combiner();
class ReducingSink extends Box<R> class ReducingSink extends Box<I>
implements AccumulatingSink<T, R, ReducingSink> { implements AccumulatingSink<T, I, ReducingSink> {
@Override @Override
public void begin(long size) { public void begin(long size) {
state = supplier.get(); state = supplier.get();
...@@ -166,9 +166,7 @@ final class ReduceOps { ...@@ -166,9 +166,7 @@ final class ReduceOps {
@Override @Override
public void accept(T t) { public void accept(T t) {
R newResult = accumulator.apply(state, t); accumulator.accept(state, t);
if (state != newResult)
state = newResult;
} }
@Override @Override
...@@ -176,7 +174,7 @@ final class ReduceOps { ...@@ -176,7 +174,7 @@ final class ReduceOps {
state = combiner.apply(state, other.state); state = combiner.apply(state, other.state);
} }
} }
return new ReduceOp<T, R, ReducingSink>(StreamShape.REFERENCE) { return new ReduceOp<T, I, ReducingSink>(StreamShape.REFERENCE) {
@Override @Override
public ReducingSink makeSink() { public ReducingSink makeSink() {
return new ReducingSink(); return new ReducingSink();
......
...@@ -490,16 +490,21 @@ abstract class ReferencePipeline<P_IN, P_OUT> ...@@ -490,16 +490,21 @@ abstract class ReferencePipeline<P_IN, P_OUT>
} }
@Override @Override
public final <R> R collect(Collector<? super P_OUT, R> collector) { public final <R, A> R collect(Collector<? super P_OUT, A, ? extends R> collector) {
A container;
if (isParallel() if (isParallel()
&& (collector.characteristics().contains(Collector.Characteristics.CONCURRENT)) && (collector.characteristics().contains(Collector.Characteristics.CONCURRENT))
&& (!isOrdered() || collector.characteristics().contains(Collector.Characteristics.UNORDERED))) { && (!isOrdered() || collector.characteristics().contains(Collector.Characteristics.UNORDERED))) {
R container = collector.resultSupplier().get(); container = collector.supplier().get();
BiFunction<R, ? super P_OUT, R> accumulator = collector.accumulator(); BiConsumer<A, ? super P_OUT> accumulator = collector.accumulator();
forEach(u -> accumulator.apply(container, u)); forEach(u -> accumulator.accept(container, u));
return container;
} }
return evaluate(ReduceOps.makeRef(collector)); else {
container = evaluate(ReduceOps.makeRef(collector));
}
return collector.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)
? (R) container
: collector.finisher().apply(container);
} }
@Override @Override
......
...@@ -651,12 +651,13 @@ public interface Stream<T> extends BaseStream<T, Stream<T>> { ...@@ -651,12 +651,13 @@ public interface Stream<T> extends BaseStream<T, Stream<T>> {
* }</pre> * }</pre>
* *
* @param <R> the type of the result * @param <R> the type of the result
* @param <A> the intermediate accumulation type of the {@code Collector}
* @param collector the {@code Collector} describing the reduction * @param collector the {@code Collector} describing the reduction
* @return the result of the reduction * @return the result of the reduction
* @see #collect(Supplier, BiConsumer, BiConsumer) * @see #collect(Supplier, BiConsumer, BiConsumer)
* @see Collectors * @see Collectors
*/ */
<R> R collect(Collector<? super T, R> collector); <R, A> R collect(Collector<? super T, A, ? extends R> collector);
/** /**
* Returns the minimum element of this stream according to the provided * Returns the minimum element of this stream according to the provided
......
...@@ -547,7 +547,7 @@ ...@@ -547,7 +547,7 @@
* List<String> l = new ArrayList(Arrays.asList("one", "two")); * List<String> l = new ArrayList(Arrays.asList("one", "two"));
* Stream<String> sl = l.stream(); * Stream<String> sl = l.stream();
* l.add("three"); * l.add("three");
* String s = sl.collect(toStringJoiner(" ")).toString(); * String s = sl.collect(joining(" "));
* }</pre> * }</pre>
* First a list is created consisting of two strings: "one"; and "two". Then a stream is created from that list. * First a list is created consisting of two strings: "one"; and "two". Then a stream is created from that list.
* Next the list is modified by adding a third string: "three". Finally the elements of the stream are collected * Next the list is modified by adding a third string: "three". Finally the elements of the stream are collected
...@@ -557,7 +557,7 @@ ...@@ -557,7 +557,7 @@
* <pre>{@code * <pre>{@code
* List<String> l = new ArrayList(Arrays.asList("one", "two")); * List<String> l = new ArrayList(Arrays.asList("one", "two"));
* Stream<String> sl = l.stream(); * Stream<String> sl = l.stream();
* String s = sl.peek(s -> l.add("BAD LAMBDA")).collect(toStringJoiner(" ")).toString(); * String s = sl.peek(s -> l.add("BAD LAMBDA")).collect(joining(" "));
* }</pre> * }</pre>
* then a {@code ConcurrentModificationException} will be thrown since the {@code peek} operation will attempt * then a {@code ConcurrentModificationException} will be thrown since the {@code peek} operation will attempt
* to add the string "BAD LAMBDA" to the list after the terminal operation has commenced. * to add the string "BAD LAMBDA" to the list after the terminal operation has commenced.
......
...@@ -40,17 +40,17 @@ public class FillableStringTest { ...@@ -40,17 +40,17 @@ public class FillableStringTest {
} }
public void testStringBuilder() { public void testStringBuilder() {
String s = generate().collect(Collectors.toStringBuilder()).toString(); String s = generate().collect(Collectors.joining());
assertEquals(s, "THREEFOURFIVE"); assertEquals(s, "THREEFOURFIVE");
} }
public void testStringBuffer() { public void testStringBuffer() {
String s = generate().collect(Collectors.toStringBuilder()).toString(); String s = generate().collect(Collectors.joining());
assertEquals(s, "THREEFOURFIVE"); assertEquals(s, "THREEFOURFIVE");
} }
public void testStringJoiner() { public void testStringJoiner() {
String s = generate().collect(Collectors.toStringJoiner("-")).toString(); String s = generate().collect(Collectors.joining("-"));
assertEquals(s, "THREE-FOUR-FIVE"); assertEquals(s, "THREE-FOUR-FIVE");
} }
} }
...@@ -36,7 +36,6 @@ import java.util.stream.Collectors; ...@@ -36,7 +36,6 @@ import java.util.stream.Collectors;
import java.util.stream.LambdaTestHelpers; import java.util.stream.LambdaTestHelpers;
import java.util.stream.OpTestCase; import java.util.stream.OpTestCase;
import java.util.stream.Stream; import java.util.stream.Stream;
import java.util.stream.StreamOpFlagTestHelper;
import java.util.stream.StreamTestDataProvider; import java.util.stream.StreamTestDataProvider;
import java.util.stream.TestData; import java.util.stream.TestData;
...@@ -59,13 +58,14 @@ import static java.util.stream.LambdaTestHelpers.pTrue; ...@@ -59,13 +58,14 @@ import static java.util.stream.LambdaTestHelpers.pTrue;
public class GroupByOpTest extends OpTestCase { public class GroupByOpTest extends OpTestCase {
public void testBypassCollect() { public void testBypassCollect() {
Collector<Integer, Map<Boolean, List<Integer>>> collector @SuppressWarnings("unchecked")
= Collectors.groupingBy(LambdaTestHelpers.forPredicate(pEven, true, false)); Collector<Integer, Map<Boolean, List<Integer>>, Map<Boolean, List<Integer>>> collector
= (Collector<Integer, Map<Boolean, List<Integer>>, Map<Boolean, List<Integer>>>) Collectors.groupingBy(LambdaTestHelpers.forPredicate(pEven, true, false));
Map<Boolean, List<Integer>> m = collector.resultSupplier().get(); Map<Boolean, List<Integer>> m = collector.supplier().get();
int[] ints = countTo(10).stream().mapToInt(e -> (int) e).toArray(); int[] ints = countTo(10).stream().mapToInt(e -> (int) e).toArray();
for (int i : ints) for (int i : ints)
m = collector.accumulator().apply(m, i); collector.accumulator().accept(m, i);
assertEquals(2, m.keySet().size()); assertEquals(2, m.keySet().size());
for(Collection<Integer> group : m.values()) { for(Collection<Integer> group : m.values()) {
...@@ -130,7 +130,7 @@ public class GroupByOpTest extends OpTestCase { ...@@ -130,7 +130,7 @@ public class GroupByOpTest extends OpTestCase {
// - Total number of values equals size of data // - Total number of values equals size of data
for (MapperData<Integer, ?> md : getMapperData(data)) { for (MapperData<Integer, ?> md : getMapperData(data)) {
Collector<Integer, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m); Collector<Integer, ?, Map<Object, List<Integer>>> tab = Collectors.groupingBy(md.m);
Map<Object, List<Integer>> result = Map<Object, List<Integer>> result =
withData(data) withData(data)
.terminal(s -> s, s -> s.collect(tab)) .terminal(s -> s, s -> s.collect(tab))
......
...@@ -43,9 +43,9 @@ import static java.util.stream.LambdaTestHelpers.countTo; ...@@ -43,9 +43,9 @@ import static java.util.stream.LambdaTestHelpers.countTo;
public class SummaryStatisticsTest extends OpTestCase { public class SummaryStatisticsTest extends OpTestCase {
public void testIntStatistics() { public void testIntStatistics() {
List<IntSummaryStatistics> instances = new ArrayList<>(); List<IntSummaryStatistics> instances = new ArrayList<>();
instances.add(countTo(1000).stream().collect(Collectors.toIntSummaryStatistics(i -> i))); instances.add(countTo(1000).stream().collect(Collectors.summarizingInt(i -> i)));
instances.add(countTo(1000).stream().mapToInt(i -> i).summaryStatistics()); instances.add(countTo(1000).stream().mapToInt(i -> i).summaryStatistics());
instances.add(countTo(1000).parallelStream().collect(Collectors.toIntSummaryStatistics(i -> i))); instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingInt(i -> i)));
instances.add(countTo(1000).parallelStream().mapToInt(i -> i).summaryStatistics()); instances.add(countTo(1000).parallelStream().mapToInt(i -> i).summaryStatistics());
for (IntSummaryStatistics stats : instances) { for (IntSummaryStatistics stats : instances) {
...@@ -58,9 +58,9 @@ public class SummaryStatisticsTest extends OpTestCase { ...@@ -58,9 +58,9 @@ public class SummaryStatisticsTest extends OpTestCase {
public void testLongStatistics() { public void testLongStatistics() {
List<LongSummaryStatistics> instances = new ArrayList<>(); List<LongSummaryStatistics> instances = new ArrayList<>();
instances.add(countTo(1000).stream().collect(Collectors.toLongSummaryStatistics(i -> i))); instances.add(countTo(1000).stream().collect(Collectors.summarizingLong(i -> i)));
instances.add(countTo(1000).stream().mapToLong(i -> i).summaryStatistics()); instances.add(countTo(1000).stream().mapToLong(i -> i).summaryStatistics());
instances.add(countTo(1000).parallelStream().collect(Collectors.toLongSummaryStatistics(i -> i))); instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingLong(i -> i)));
instances.add(countTo(1000).parallelStream().mapToLong(i -> i).summaryStatistics()); instances.add(countTo(1000).parallelStream().mapToLong(i -> i).summaryStatistics());
for (LongSummaryStatistics stats : instances) { for (LongSummaryStatistics stats : instances) {
...@@ -73,9 +73,9 @@ public class SummaryStatisticsTest extends OpTestCase { ...@@ -73,9 +73,9 @@ public class SummaryStatisticsTest extends OpTestCase {
public void testDoubleStatistics() { public void testDoubleStatistics() {
List<DoubleSummaryStatistics> instances = new ArrayList<>(); List<DoubleSummaryStatistics> instances = new ArrayList<>();
instances.add(countTo(1000).stream().collect(Collectors.toDoubleSummaryStatistics(i -> i))); instances.add(countTo(1000).stream().collect(Collectors.summarizingDouble(i -> i)));
instances.add(countTo(1000).stream().mapToDouble(i -> i).summaryStatistics()); instances.add(countTo(1000).stream().mapToDouble(i -> i).summaryStatistics());
instances.add(countTo(1000).parallelStream().collect(Collectors.toDoubleSummaryStatistics(i -> i))); instances.add(countTo(1000).parallelStream().collect(Collectors.summarizingDouble(i -> i)));
instances.add(countTo(1000).parallelStream().mapToDouble(i -> i).summaryStatistics()); instances.add(countTo(1000).parallelStream().mapToDouble(i -> i).summaryStatistics());
for (DoubleSummaryStatistics stats : instances) { for (DoubleSummaryStatistics stats : instances) {
......
...@@ -23,13 +23,17 @@ ...@@ -23,13 +23,17 @@
package org.openjdk.tests.java.util.stream; package org.openjdk.tests.java.util.stream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import java.util.StringJoiner;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.ConcurrentSkipListMap;
...@@ -53,7 +57,10 @@ import static java.util.stream.Collectors.groupingByConcurrent; ...@@ -53,7 +57,10 @@ import static java.util.stream.Collectors.groupingByConcurrent;
import static java.util.stream.Collectors.partitioningBy; import static java.util.stream.Collectors.partitioningBy;
import static java.util.stream.Collectors.reducing; import static java.util.stream.Collectors.reducing;
import static java.util.stream.Collectors.toCollection; import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toConcurrentMap;
import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;
import static java.util.stream.LambdaTestHelpers.assertContents; import static java.util.stream.LambdaTestHelpers.assertContents;
import static java.util.stream.LambdaTestHelpers.assertContentsUnordered; import static java.util.stream.LambdaTestHelpers.assertContentsUnordered;
import static java.util.stream.LambdaTestHelpers.mDoubler; import static java.util.stream.LambdaTestHelpers.mDoubler;
...@@ -65,16 +72,6 @@ import static java.util.stream.LambdaTestHelpers.mDoubler; ...@@ -65,16 +72,6 @@ import static java.util.stream.LambdaTestHelpers.mDoubler;
*/ */
@SuppressWarnings({"rawtypes", "unchecked"}) @SuppressWarnings({"rawtypes", "unchecked"})
public class TabulatorsTest extends OpTestCase { public class TabulatorsTest extends OpTestCase {
// There are 8 versions of groupingBy:
// groupingBy: { map supplier, not } x { downstream collector, not } x { concurrent, not }
// There are 2 versions of partition: { map supplier, not }
// There are 4 versions of toMap
// mappedTo(function, mapSupplier?, mergeFunction?)
// Each variety needs at least one test
// Plus a variety of multi-level tests (groupBy(..., partition), partition(..., groupBy))
// Plus negative tests for mapping to null
// Each test should be matched by a nest of asserters (see TabulationAssertion...)
private static abstract class TabulationAssertion<T, U> { private static abstract class TabulationAssertion<T, U> {
abstract void assertValue(U value, abstract void assertValue(U value,
...@@ -101,7 +98,7 @@ public class TabulatorsTest extends OpTestCase { ...@@ -101,7 +98,7 @@ public class TabulatorsTest extends OpTestCase {
boolean ordered) throws ReflectiveOperationException { boolean ordered) throws ReflectiveOperationException {
if (!clazz.isAssignableFrom(map.getClass())) if (!clazz.isAssignableFrom(map.getClass()))
fail(String.format("Class mismatch in GroupedMapAssertion: %s, %s", clazz, map.getClass())); fail(String.format("Class mismatch in GroupedMapAssertion: %s, %s", clazz, map.getClass()));
assertContentsUnordered(map.keySet(), source.get().map(classifier).collect(Collectors.toSet())); assertContentsUnordered(map.keySet(), source.get().map(classifier).collect(toSet()));
for (Map.Entry<K, ? extends V> entry : map.entrySet()) { for (Map.Entry<K, ? extends V> entry : map.entrySet()) {
K key = entry.getKey(); K key = entry.getKey();
downstream.assertValue(entry.getValue(), downstream.assertValue(entry.getValue(),
...@@ -111,6 +108,39 @@ public class TabulatorsTest extends OpTestCase { ...@@ -111,6 +108,39 @@ public class TabulatorsTest extends OpTestCase {
} }
} }
static class ToMapAssertion<T, K, V, M extends Map<K,V>> extends TabulationAssertion<T, M> {
private final Class<? extends Map> clazz;
private final Function<T, K> keyFn;
private final Function<T, V> valueFn;
private final BinaryOperator<V> mergeFn;
ToMapAssertion(Function<T, K> keyFn,
Function<T, V> valueFn,
BinaryOperator<V> mergeFn,
Class<? extends Map> clazz) {
this.clazz = clazz;
this.keyFn = keyFn;
this.valueFn = valueFn;
this.mergeFn = mergeFn;
}
@Override
void assertValue(M map, Supplier<Stream<T>> source, boolean ordered) throws ReflectiveOperationException {
Set<K> uniqueKeys = source.get().map(keyFn).collect(toSet());
assertTrue(clazz.isAssignableFrom(map.getClass()));
assertEquals(uniqueKeys, map.keySet());
source.get().forEach(t -> {
K key = keyFn.apply(t);
V v = source.get()
.filter(e -> key.equals(keyFn.apply(e)))
.map(valueFn)
.reduce(mergeFn)
.get();
assertEquals(map.get(key), v);
});
}
}
static class PartitionAssertion<T, D> extends TabulationAssertion<T, Map<Boolean,D>> { static class PartitionAssertion<T, D> extends TabulationAssertion<T, Map<Boolean,D>> {
private final Predicate<T> predicate; private final Predicate<T> predicate;
private final TabulationAssertion<T,D> downstream; private final TabulationAssertion<T,D> downstream;
...@@ -204,7 +234,7 @@ public class TabulatorsTest extends OpTestCase { ...@@ -204,7 +234,7 @@ public class TabulatorsTest extends OpTestCase {
private <T> ResultAsserter<T> mapTabulationAsserter(boolean ordered) { private <T> ResultAsserter<T> mapTabulationAsserter(boolean ordered) {
return (act, exp, ord, par) -> { return (act, exp, ord, par) -> {
if (par & (!ordered || !ord)) { if (par && (!ordered || !ord)) {
TabulatorsTest.nestedMapEqualityAssertion(act, exp); TabulatorsTest.nestedMapEqualityAssertion(act, exp);
} }
else { else {
...@@ -215,7 +245,7 @@ public class TabulatorsTest extends OpTestCase { ...@@ -215,7 +245,7 @@ public class TabulatorsTest extends OpTestCase {
private<T, M extends Map> private<T, M extends Map>
void exerciseMapTabulation(TestData<T, Stream<T>> data, void exerciseMapTabulation(TestData<T, Stream<T>> data,
Collector<T, ? extends M> collector, Collector<T, ?, ? extends M> collector,
TabulationAssertion<T, M> assertion) TabulationAssertion<T, M> assertion)
throws ReflectiveOperationException { throws ReflectiveOperationException {
boolean ordered = !collector.characteristics().contains(Collector.Characteristics.UNORDERED); boolean ordered = !collector.characteristics().contains(Collector.Characteristics.UNORDERED);
...@@ -248,6 +278,172 @@ public class TabulatorsTest extends OpTestCase { ...@@ -248,6 +278,172 @@ public class TabulatorsTest extends OpTestCase {
assertEquals(o1, o2); assertEquals(o1, o2);
} }
private<T, R> void assertCollect(TestData.OfRef<T> data,
Collector<T, ?, R> collector,
Function<Stream<T>, R> streamReduction) {
R check = streamReduction.apply(data.stream());
withData(data).terminal(s -> s.collect(collector)).expectedResult(check).exercise();
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testReduce(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
assertCollect(data, Collectors.reducing(0, Integer::sum),
s -> s.reduce(0, Integer::sum));
assertCollect(data, Collectors.reducing(Integer.MAX_VALUE, Integer::min),
s -> s.min(Integer::compare).orElse(Integer.MAX_VALUE));
assertCollect(data, Collectors.reducing(Integer.MIN_VALUE, Integer::max),
s -> s.max(Integer::compare).orElse(Integer.MIN_VALUE));
assertCollect(data, Collectors.reducing(Integer::sum),
s -> s.reduce(Integer::sum));
assertCollect(data, Collectors.minBy(Comparator.naturalOrder()),
s -> s.min(Integer::compare));
assertCollect(data, Collectors.maxBy(Comparator.naturalOrder()),
s -> s.max(Integer::compare));
assertCollect(data, Collectors.reducing(0, x -> x*2, Integer::sum),
s -> s.map(x -> x*2).reduce(0, Integer::sum));
assertCollect(data, Collectors.summingLong(x -> x * 2L),
s -> s.map(x -> x*2L).reduce(0L, Long::sum));
assertCollect(data, Collectors.summingInt(x -> x * 2),
s -> s.map(x -> x*2).reduce(0, Integer::sum));
assertCollect(data, Collectors.summingDouble(x -> x * 2.0d),
s -> s.map(x -> x * 2.0d).reduce(0.0d, Double::sum));
assertCollect(data, Collectors.averagingInt(x -> x * 2),
s -> s.mapToInt(x -> x * 2).average().orElse(0));
assertCollect(data, Collectors.averagingLong(x -> x * 2),
s -> s.mapToLong(x -> x * 2).average().orElse(0));
assertCollect(data, Collectors.averagingDouble(x -> x * 2),
s -> s.mapToDouble(x -> x * 2).average().orElse(0));
// Test explicit Collector.of
Collector<Integer, long[], Double> avg2xint = Collector.of(() -> new long[2],
(a, b) -> {
a[0] += b * 2;
a[1]++;
},
(a, b) -> {
a[0] += b[0];
a[1] += b[1];
return a;
},
a -> a[1] == 0 ? 0.0d : (double) a[0] / a[1]);
assertCollect(data, avg2xint,
s -> s.mapToInt(x -> x * 2).average().orElse(0));
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testJoin(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
withData(data)
.terminal(s -> s.map(Object::toString).collect(Collectors.joining()))
.expectedResult(join(data, ""))
.exercise();
Collector<String, StringBuilder, String> likeJoining = Collector.of(StringBuilder::new, StringBuilder::append, (sb1, sb2) -> sb1.append(sb2.toString()), StringBuilder::toString);
withData(data)
.terminal(s -> s.map(Object::toString).collect(likeJoining))
.expectedResult(join(data, ""))
.exercise();
withData(data)
.terminal(s -> s.map(Object::toString).collect(Collectors.joining(",")))
.expectedResult(join(data, ","))
.exercise();
withData(data)
.terminal(s -> s.map(Object::toString).collect(Collectors.joining(",", "[", "]")))
.expectedResult("[" + join(data, ",") + "]")
.exercise();
withData(data)
.terminal(s -> s.map(Object::toString)
.collect(StringBuilder::new, StringBuilder::append, StringBuilder::append)
.toString())
.expectedResult(join(data, ""))
.exercise();
withData(data)
.terminal(s -> s.map(Object::toString)
.collect(() -> new StringJoiner(","),
(sj, cs) -> sj.add(cs),
(j1, j2) -> j1.merge(j2))
.toString())
.expectedResult(join(data, ","))
.exercise();
withData(data)
.terminal(s -> s.map(Object::toString)
.collect(() -> new StringJoiner(",", "[", "]"),
(sj, cs) -> sj.add(cs),
(j1, j2) -> j1.merge(j2))
.toString())
.expectedResult("[" + join(data, ",") + "]")
.exercise();
}
private<T> String join(TestData.OfRef<T> data, String delim) {
StringBuilder sb = new StringBuilder();
boolean first = true;
for (T i : data) {
if (!first)
sb.append(delim);
sb.append(i.toString());
first = false;
}
return sb.toString();
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleToMap(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
Function<Integer, Integer> keyFn = i -> i * 2;
Function<Integer, Integer> valueFn = i -> i * 4;
List<Integer> dataAsList = Arrays.asList(data.stream().toArray(Integer[]::new));
Set<Integer> dataAsSet = new HashSet<>(dataAsList);
BinaryOperator<Integer> sum = Integer::sum;
for (BinaryOperator<Integer> op : Arrays.asList((u, v) -> u,
(u, v) -> v,
sum)) {
try {
exerciseMapTabulation(data, toMap(keyFn, valueFn),
new ToMapAssertion<>(keyFn, valueFn, op, HashMap.class));
if (dataAsList.size() != dataAsSet.size())
fail("Expected ISE on input with duplicates");
}
catch (IllegalStateException e) {
if (dataAsList.size() == dataAsSet.size())
fail("Expected no ISE on input without duplicates");
}
exerciseMapTabulation(data, toMap(keyFn, valueFn, op),
new ToMapAssertion<>(keyFn, valueFn, op, HashMap.class));
exerciseMapTabulation(data, toMap(keyFn, valueFn, op, TreeMap::new),
new ToMapAssertion<>(keyFn, valueFn, op, TreeMap.class));
}
// For concurrent maps, only use commutative merge functions
try {
exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn),
new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentHashMap.class));
if (dataAsList.size() != dataAsSet.size())
fail("Expected ISE on input with duplicates");
}
catch (IllegalStateException e) {
if (dataAsList.size() == dataAsSet.size())
fail("Expected no ISE on input without duplicates");
}
exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn, sum),
new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentHashMap.class));
exerciseMapTabulation(data, toConcurrentMap(keyFn, valueFn, sum, ConcurrentSkipListMap::new),
new ToMapAssertion<>(keyFn, valueFn, sum, ConcurrentSkipListMap.class));
}
@Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class) @Test(dataProvider = "StreamTestData<Integer>", dataProviderClass = StreamTestDataProvider.class)
public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException { public void testSimpleGroupBy(String name, TestData.OfRef<Integer> data) throws ReflectiveOperationException {
Function<Integer, Integer> classifier = i -> i % 3; Function<Integer, Integer> classifier = i -> i % 3;
......
...@@ -47,7 +47,7 @@ public class MethodReferenceTestInstanceMethod { ...@@ -47,7 +47,7 @@ public class MethodReferenceTestInstanceMethod {
} }
public void testStringBuffer() { public void testStringBuffer() {
String s = generate().collect(Collectors.toStringBuilder()).toString(); String s = generate().collect(Collectors.joining());
assertEquals(s, "THREEFOURFIVE"); assertEquals(s, "THREEFOURFIVE");
} }
......
...@@ -119,7 +119,7 @@ public class TestHarness { ...@@ -119,7 +119,7 @@ public class TestHarness {
Class stub = new Class(specimen.getName(), cm); Class stub = new Class(specimen.getName(), cm);
String params = String params =
Arrays.asList(args).stream().collect(Collectors.toStringJoiner(", ")).toString(); Arrays.asList(args).stream().collect(Collectors.joining(", ")).toString();
ConcreteMethod sm = new ConcreteMethod( ConcreteMethod sm = new ConcreteMethod(
method.getReturnType(), method.getName(), method.getReturnType(), method.getName(),
...@@ -150,7 +150,7 @@ public class TestHarness { ...@@ -150,7 +150,7 @@ public class TestHarness {
null, Arrays.asList((Method)method)); null, Arrays.asList((Method)method));
Class cstub = new Class(specimen.getName()); Class cstub = new Class(specimen.getName());
String params = Arrays.asList(args).stream().collect(Collectors.toStringJoiner(", ")).toString(); String params = Arrays.asList(args).stream().collect(Collectors.joining(", ")).toString();
ConcreteMethod sm = new ConcreteMethod( ConcreteMethod sm = new ConcreteMethod(
"int", SourceModel.stdMethodName, "int", SourceModel.stdMethodName,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册