public interface IPage<T> extends Serializable {
/** @deprecated */
@Deprecated
default String[] descs() {
return null;
}
/** @deprecated */
@Deprecated
default String[] ascs() {
return null;
}
List<OrderItem> orders();
default Map<Object, Object> condition() {
return null;
}
default boolean optimizeCountSql() {
return true;
}
default boolean isSearchCount() {
return true;
}
default long offset() {
return this.getCurrent() > 0L ? (this.getCurrent() - 1L) * this.getSize() : 0L;
}
default long getPages() {
if (this.getSize() == 0L) {
return 0L;
} else {
long pages = this.getTotal() / this.getSize();
if (this.getTotal() % this.getSize() != 0L) {
++pages;
}
return pages;
}
}
default IPage<T> setPages(long pages) {
return this;
}
default void hitCount(boolean hit) {
}
default boolean isHitCount() {
return false;
}
List<T> getRecords();
IPage<T> setRecords(List<T> records);
long getTotal();
IPage<T> setTotal(long total);
long getSize();
IPage<T> setSize(long size);
long getCurrent();
IPage<T> setCurrent(long current);
default <R> IPage<R> convert(Function<? super T, ? extends R> mapper) {
List<R> collect = (List)this.getRecords().stream().map(mapper).collect(Collectors.toList());
return this.setRecords(collect);
}
default String cacheKey() {
StringBuilder key = new StringBuilder();
key.append(this.offset()).append(":").append(this.getSize());
List<OrderItem> orders = this.orders();
if (CollectionUtils.isNotEmpty(orders)) {
Iterator var3 = orders.iterator();
while(var3.hasNext()) {
OrderItem item = (OrderItem)var3.next();
key.append(":").append(item.getColumn()).append(":").append(item.isAsc());
}
}
return key.toString();
}
}
This is the source code of a framework. When I use the convert() function:
default <R> IPage<R> convert(Function<? super T, ? extends R> mapper) {
List<R> collect = (List)this.getRecords().stream().map(mapper).collect(Collectors.toList());
return this.setRecords(collect);
}
What makes me wonder is that the return type uses a new type variable R, yet it just calls this.setRecords(collect), even though setRecords() only accepts a List<T>!
IPage<T> setRecords(List<T> records);
To verify this, I wrote an interface myself, but it failed to compile:
public interface IPage<T> {
IPage<T> setRecords(List<T> list);
default <R> IPage<R> convert() {
List<R> collect = new ArrayList<>();
return this.setRecords(collect); //error
}
}
Can someone help me solve my doubts? Thank you very much!
The source code of MyBatis-Plus contains an additional cast to IPage<R> to change the type of this.
The method code is:
default <R> IPage<R> convert(Function<? super T, ? extends R> mapper) {
List<R> collect = this.getRecords().stream().map(mapper).collect(toList());
return ((IPage<R>) this).setRecords(collect);
}
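The cast compiles because it is an unchecked cast: after type erasure there is only one IPage class at runtime, so the compiler merely warns. It is safe here only because the implementations store the records in a single list regardless of the element type. A minimal sketch of the pattern (a hypothetical Page interface, not the framework's code):
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

interface Page<T> {
    List<T> getRecords();
    Page<T> setRecords(List<T> records);

    // Same trick as the framework: cast 'this' to Page<R> before calling setRecords.
    default <R> Page<R> convert(Function<? super T, ? extends R> mapper) {
        List<R> collect = getRecords().stream().map(mapper).collect(Collectors.toList());
        @SuppressWarnings("unchecked")
        Page<R> self = (Page<R>) this; // unchecked, but legal: generics are erased at runtime
        return self.setRecords(collect);
    }
}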
I am trying to create a two dimensional list in java.
My first and preferred method is as so:
List<List<Integer>> seqList = IntStream.range(0, n)
.mapToObj(ArrayList<Integer>::new)
.collect(Collectors.toList());
However, for some reason this method takes too long and I get a timeout.
On the other hand, when I tried to create the two dimensional list using java 7 like so, there was no timeout.
List<List<Integer>> seqList = new ArrayList<>();
for(int i = 0; i < n; i++) {
seqList.add(new ArrayList<>());
}
I am trying to use Java 8 streams as much as possible. Could someone explain why my Java 8 code takes so long, and what I can do to make it run with the same time complexity as the Java 7 code?
This is an alternative way to do it:
int n = 10;
List<List<Integer>> seqList = Stream.<List<Integer>>generate(() -> new ArrayList<>())
        .limit(n)
        .collect(Collectors.toList());
Thanks to Jacob G, I was able to see the problem.
The call .mapToObj(ArrayList<Integer>::new) was creating ArrayLists of varying initial capacity: it is equivalent to .mapToObj(i -> new ArrayList<Integer>(i)). Allocating a backing array of length i for every element makes the total work O(n^2), which causes the timeout when n is large. The better code is as follows:
List<List<Integer>> seqList2 = IntStream.range(0, n)
.mapToObj(i -> new ArrayList<Integer>())
.collect(Collectors.toList());
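To see the cost difference directly, here is a rough timing sketch (not a proper benchmark; the class name and the value of n are arbitrary, and n is kept small because the buggy version retains roughly n^2/2 list slots):
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class CapacityDemo {
    public static void main(String[] args) {
        int n = 5_000;
        long t0 = System.nanoTime();
        List<List<Integer>> bad = IntStream.range(0, n)
                .mapToObj(ArrayList<Integer>::new)       // ArrayList(int initialCapacity)
                .collect(Collectors.toList());
        long t1 = System.nanoTime();
        List<List<Integer>> good = IntStream.range(0, n)
                .mapToObj(i -> new ArrayList<Integer>()) // no-arg constructor
                .collect(Collectors.toList());
        long t2 = System.nanoTime();
        System.out.printf("capacity-i: %d ms, empty: %d ms%n",
                (t1 - t0) / 1_000_000, (t2 - t1) / 1_000_000);
    }
}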
The relative cost of the streaming APIs is high, even with the correction. This can be seen by walking through the many steps that are performed; the complexity is quite extraordinary.
The code examples below are from the IBM Java SE Runtime Environment, version 1.8.
// A walkthrough of the operation:
//
// "Create a list of lists by collecting the results of applying the ArrayList
// initializer to the stream of 'int' values ranging from 0 to 10."
static {
List<List<Integer>> seqList = IntStream.range(0, 10)
.mapToObj( ArrayList<Integer>::new )
.collect( Collectors.toList() );
}
// First step: Create an 'int' Stream.
//
// Roughly, create an 'int' iterator, then wrap that in a 'int' stream.
//
// The iterator is responsible for knowing the initial and final values
// over the range of iteration, and for providing basic iteration.
//
// However, 'mapToObj' is part of the streaming API. The iterator
// must be put into a stream to access that API.
// The 'int' stream factory method.
//
// Fan out to 'RangeIntSpliterator.init' and to 'StreamSupport.intStream'.
//
// The 'int' stream is created with 'parallel' set to false.
class IntStream {
public static IntStream range(int startInclusive, int endExclusive) {
if ( startInclusive >= endExclusive ) {
return empty();
} else {
return StreamSupport.intStream(
new Streams.RangeIntSpliterator(startInclusive, endExclusive, false),
false );
}
}
}
// The 'int' iterator type.
//
// After setup, 'forEachRemaining' will be used to perform
// the 'int' iteration.
class RangeIntSpliterator implements Spliterator.OfInt {
protected int from;
protected final int upTo;
protected int last;
RangeIntSpliterator(int from, int upTo, boolean closed) {
this( from, upTo, (closed ? 1 : 0) );
}
void forEachRemaining(Consumer<? super Integer> action);
void forEachRemaining(IntConsumer consumer);
}
// The 'int' stream factory method.
//
// Fan out to 'IntPipeline.Head<>.init'. 'IntPipeline.Head' extends
// 'IntPipeline', which extends 'AbstractPipeline'.
//
// 'IntPipeline.mapToObj' creates an stream of 'ArrayList' instances
// out of the stream of 'int' instances.
class StreamSupport {
public static IntStream intStream(Spliterator.OfInt spliterator, boolean parallel) {
return new IntPipeline.Head<>(
spliterator,
StreamOpFlag.fromCharacteristics(spliterator),
parallel );
}
}
class IntPipeline.Head<E_IN> extends IntPipeline<E_IN> {
Head(Spliterator<Integer> source, int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
}
class IntPipeline<E_IN>
extends AbstractPipeline<E_IN, Integer, IntStream>
implements IntStream {
IntPipeline(Spliterator<Integer> source, int sourceFlags, boolean parallel) {
super(source, sourceFlags, parallel);
}
<U> Stream<U> mapToObj(IntFunction<? extends U> mapper);
}
class AbstractPipeline {
AbstractPipeline(Spliterator<?> source, int sourceFlags, boolean parallel) {
this.previousStage = null;
this.sourceSpliterator = source;
this.sourceStage = this;
this.sourceOrOpFlags = ( sourceFlags & StreamOpFlag.STREAM_MASK );
this.combinedFlags = ( (~(sourceOrOpFlags << 1)) & StreamOpFlag.INITIAL_OPS_VALUE );
this.depth = 0;
this.parallel = parallel;
}
}
// Second step: Create a second stream by composing the 'int' stream with the ArrayList
// initializer.
//
// Fan out to 'ReferencePipeline.StatelessOp'. 'StatelessOp' extends 'ReferencePipeline',
// which extends 'AbstractPipeline'.
class IntPipeline {
@Override
public final <U> Stream<U> mapToObj(IntFunction<? extends U> mapper) {
Objects.requireNonNull(mapper);
return new ReferencePipeline.StatelessOp<Integer, U>(
this,
StreamShape.INT_VALUE,
(StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) ) {
Sink<Integer> opWrapSink(int flags, Sink<U> sink) {
return new Sink.ChainedInt<U>(sink) {
public void accept(int t) {
downstream.accept( mapper.apply(t) );
}
};
}
};
}
}
class StatelessOp<E_IN, E_OUT> extends ReferencePipeline<E_IN, E_OUT> {
StatelessOp(AbstractPipeline<?, E_IN, ?> upstream, StreamShape inputShape, int opFlags) {
super(upstream, opFlags);
assert upstream.getOutputShape() == inputShape;
    }
}
abstract class ReferencePipeline<P_IN, P_OUT>
extends AbstractPipeline<P_IN, P_OUT, Stream<P_OUT>>
implements Stream<P_OUT> {
ReferencePipeline(Supplier<? extends Spliterator<?>> source, int sourceFlags) {
super(source, sourceFlags);
}
}
abstract class AbstractPipeline<E_IN, E_OUT, S extends BaseStream<E_OUT, S>>
extends PipelineHelper<E_OUT> implements BaseStream<E_OUT, S> {
AbstractPipeline(AbstractPipeline<?, E_IN, ?> previousStage, int opFlags) {
if ( previousStage.linkedOrConsumed ) {
throw new IllegalStateException(MSG_STREAM_LINKED);
}
previousStage.linkedOrConsumed = true;
previousStage.nextStage = this;
this.previousStage = previousStage;
this.sourceOrOpFlags = opFlags & StreamOpFlag.OP_MASK;
this.combinedFlags = StreamOpFlag.combineOpFlags(opFlags, previousStage.combinedFlags);
this.sourceStage = previousStage.sourceStage;
if ( opIsStateful() ) {
sourceStage.sourceAnyStateful = true;
}
this.depth = previousStage.depth + 1;
}
}
// Third step: Obtain the collector which is to be used by the 'int' stream.
//
// Note use of 'CH_ID', which marks the collector as an 'identity finisher'.
class Collectors {
static final Set<Collector.Characteristics> CH_ID =
Collections.unmodifiableSet( EnumSet.of(Collector.Characteristics.IDENTITY_FINISH) );
public static <T> Collector<T, ?, List<T>> toList() {
return new CollectorImpl<>(
(Supplier<List<T>>) ArrayList::new,
List::add,
(left, right) -> { left.addAll(right); return left; },
CH_ID);
}
}
class CollectorImpl<T, A, R> implements Collector<T, A, R> {
private final Supplier<A> supplier;
private final BiConsumer<A, T> accumulator;
private final BinaryOperator<A> combiner;
private final Function<A, R> finisher;
private final Set<Characteristics> characteristics;
CollectorImpl(
Supplier<A> supplier,
BiConsumer<A, T> accumulator,
BinaryOperator<A> combiner,
Function<A,R> finisher,
Set<Characteristics> characteristics) {
this.supplier = supplier;
this.accumulator = accumulator;
this.combiner = combiner;
this.finisher = finisher;
this.characteristics = characteristics;
}
CollectorImpl(
Supplier<A> supplier,
BiConsumer<A, T> accumulator,
BinaryOperator<A> combiner,
Set<Characteristics> characteristics) {
this(supplier, accumulator, combiner, castingIdentity(), characteristics);
}
}
// Fourth step: Start collection.
//
// Push the collector through 'ReduceOps.makeRef'.
class ReferencePipeline {
public final <R, A> R collect(Collector<? super P_OUT, A, R> collector) {
A container;
if ( isParallel() &&
(collector.characteristics().contains(Collector.Characteristics.CONCURRENT)) &&
(!isOrdered() ||
collector.characteristics().contains(Collector.Characteristics.UNORDERED))) {
container = collector.supplier().get();
BiConsumer<A, ? super P_OUT> accumulator = collector.accumulator();
forEach(u -> accumulator.accept(container, u));
} else {
container = evaluate( ReduceOps.makeRef(collector) );
}
return collector.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)
? (R) container
: collector.finisher().apply(container);
}
}
class ReduceOps {
public static <T, I> TerminalOp<T, I> makeRef(Collector<? super T, I, ?> collector) {
Supplier<I> supplier = Objects.requireNonNull(collector).supplier();
BiConsumer<I, ? super T> accumulator = collector.accumulator();
BinaryOperator<I> combiner = collector.combiner();
class ReducingSink extends Box<I> implements AccumulatingSink<T, I, ReducingSink> {
public void begin(long size) {
state = supplier.get();
}
public void accept(T t) {
accumulator.accept(state, t);
}
public void combine(ReducingSink other) {
state = combiner.apply(state, other.state);
}
}
return new ReduceOp<T, I, ReducingSink>(StreamShape.REFERENCE) {
public ReducingSink makeSink() {
return new ReducingSink();
}
};
}
}
class ReduceOp<T, R, S extends AccumulatingSink<T, R, S>> implements TerminalOp<T, R> {
private final StreamShape inputShape;
ReduceOp(StreamShape shape) {
inputShape = shape;
}
}
// Fifth step: Walk into the stream API.
class ReferencePipeline {
<R> R evaluate(TerminalOp<E_OUT, R> terminalOp) {
assert ( getOutputShape() == terminalOp.inputShape() );
if ( linkedOrConsumed ) {
throw new IllegalStateException(MSG_STREAM_LINKED);
}
linkedOrConsumed = true;
return ( isParallel()
? terminalOp.evaluateParallel( this, sourceSpliterator( terminalOp.getOpFlags() ) )
: terminalOp.evaluateSequential( this, sourceSpliterator( terminalOp.getOpFlags() ) ) );
}
}
class AbstractPipeline {
Spliterator<E_OUT> sourceStageSpliterator() {
if ( this != sourceStage ) {
throw new IllegalStateException();
}
if ( linkedOrConsumed ) {
throw new IllegalStateException(MSG_STREAM_LINKED);
}
linkedOrConsumed = true;
if ( sourceStage.sourceSpliterator != null ) {
Spliterator<E_OUT> s = sourceStage.sourceSpliterator;
sourceStage.sourceSpliterator = null;
return s;
} else if ( sourceStage.sourceSupplier != null ) {
Spliterator<E_OUT> s = (Spliterator<E_OUT>) sourceStage.sourceSupplier.get();
sourceStage.sourceSupplier = null;
return s;
} else {
throw new IllegalStateException(MSG_CONSUMED);
}
}
}
class ReduceOp {
public <P_IN> R evaluateSequential(
PipelineHelper<T> helper,
Spliterator<P_IN> spliterator) {
return helper.wrapAndCopyInto( makeSink(), spliterator ).get();
}
}
class AbstractPipeline {
final <P_IN, S extends Sink<E_OUT>> S wrapAndCopyInto(S sink, Spliterator<P_IN> spliterator) {
copyInto( wrapSink( Objects.requireNonNull(sink) ), spliterator );
return sink;
}
}
class AbstractPipeline {
    <P_IN> Sink<P_IN> wrapSink(Sink<E_OUT> sink) {
Objects.requireNonNull(sink);
for ( AbstractPipeline p = AbstractPipeline.this; p.depth > 0; p = p.previousStage ) {
sink = p.opWrapSink( p.previousStage.combinedFlags, sink );
}
return (Sink<P_IN>) sink;
    }
}
class StatelessOp {
Sink<Integer> opWrapSink(int flags, Sink<U> sink) {
return new Sink.ChainedInt<U>(sink) {
public void accept(int t) {
downstream.accept( mapper.apply(t) );
}
};
}
}
// Sixth step: Perform the actual iteration and collection.
//
// Ignoring 'begin' and 'end', iteration and collection occurs in the call
// to 'forEachRemaining'.
class AbstractPipeline {
<P_IN> void copyInto(Sink<P_IN> wrappedSink, Spliterator<P_IN> spliterator) {
Objects.requireNonNull(wrappedSink);
if ( !StreamOpFlag.SHORT_CIRCUIT.isKnown( getStreamAndOpFlags() ) ) {
wrappedSink.begin( spliterator.getExactSizeIfKnown() );
spliterator.forEachRemaining(wrappedSink);
wrappedSink.end();
} else {
copyIntoWithCancel(wrappedSink, spliterator);
}
}
}
class RangeIntSpliterator implements Spliterator.OfInt {
void forEachRemaining(Consumer<? super Integer> action) {
if ( action instanceof IntConsumer ) {
forEachRemaining((IntConsumer) action);
} else {
if ( Tripwire.ENABLED ) {
    Tripwire.trip(getClass(), "{0} calling Spliterator.OfInt.forEachRemaining((IntConsumer) action::accept)");
}
forEachRemaining((IntConsumer) action::accept);
}
}
void forEachRemaining(IntConsumer consumer) {
Objects.requireNonNull(consumer);
int i = from;
final int hUpTo = upTo;
int hLast = last;
from = upTo;
last = 0;
while ( i < hUpTo ) {
consumer.accept(i++);
}
if ( hLast > 0 ) {
consumer.accept(i);
}
}
}
// Seventh step: For each iteration, unwind and perform the mapping and
// collection operations.
// The anonymous 'Sink.ChainedInt<U>' subclass created by 'mapToObj':
new Sink.ChainedInt<U>(sink) {
public void accept(int t) {
downstream.accept( mapper.apply(t) );
}
}
class ArrayList {
public ArrayList(int initialCapacity) {
// ...
}
}
class ReducingSink {
public void accept(T t) {
accumulator.accept(state, t);
}
}
class ArrayList {
public boolean add(E e) {
// ...
}
}
// Eighth step: Walking out with the return value.
IntPipeline$4(AbstractPipeline<E_IN,E_OUT,S>).wrapAndCopyInto(S, Spliterator<P_IN>)
-- returns a 'ReducingSink' instance.
ReduceOps$3(ReduceOps$ReduceOp<T,R,S>).evaluateSequential(PipelineHelper<T>, Spliterator<P_IN>)
-- returns the 'ArrayList' instance.
IntPipeline$4(AbstractPipeline<E_IN,E_OUT,S>).evaluate(TerminalOp<E_OUT,R>)
-- returns the 'ArrayList' instance.
IntPipeline$4(ReferencePipeline<P_IN,P_OUT>).collect(Collector<? super P_OUT,A,R>)
-- returns the 'ArrayList' instance.
Tester.main
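After all of this machinery, the work actually performed is roughly equivalent to the plain loop below, which is why the explicit Java 7 loop is so much cheaper per element (a sketch of the fused behavior, keeping the capacity-i constructor from the original example):
import java.util.ArrayList;
import java.util.List;

// Roughly what the pipeline boils down to for IntStream.range(0, 10):
List<List<Integer>> seqList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
    seqList.add(new ArrayList<Integer>(i)); // ArrayList(int initialCapacity), as discussed above
}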
I am stuck and unsure how to properly check/ensure that my B is operating on valid types found in the collection.
OperateAll should be generic, accept a collection of objects, and only use Bs whose type matches the objects found in the collection.
public interface B<T> {
public boolean operate(T t);
}
public class OperateAll<T> implements B<T> {
private Collection<T> collection;
public OperateAll(Collection<T> collection) {
this.collection = collection;
}
//I need to ensure T is of type B<T>, so I can do the described if statement
public boolean operate(T t) {
if (t == null) {
return false;
}
for (T item : collection) {
// if !t.operate(item) return false;
}
return true;
}
}
Clarification on the problem:
I need to do something like this:
Collection<Integer> collection = new LinkedList<>();
Integer[] numbers = new Integer[]{1, 2, 3, 4, 5};
Collections.addAll(collection, numbers);
B<Integer> op = new OperateAll<>(collection);
B<Integer> validateNumber = new ValidNumber<>();
//if B<String> validateNumber, this should not be allowed as an argument.
op.operate(validateNumber);
This way, validateNumber can check whether it can operate on all the items in the collection held by op.
If I understood correctly, you don't need a constraint on T. Instead, OperateAll should implement B<B<T>>, not B<T>, so that its operate method takes another operation as its argument:
class OperateAll<T> implements B<B<T>> {
private Collection<T> collection;
public OperateAll(Collection<T> collection) {
this.collection = collection;
}
public boolean operate(B<T> t) {
if (t == null) {
return false;
}
for (T item : collection) {
if (!t.operate(item)) return false;
}
return true;
}
}
This makes code like this possible:
Collection<Integer> collection = new LinkedList<>();
Integer[] numbers = new Integer[]{1, 2, 3, 4, 5};
Collections.addAll(collection, numbers);
// note the type of op
B<B<Integer>> op = new OperateAll<>(collection);
B<Integer> validateNumber = x -> x > 3;
op.operate(validateNumber);
B<String> validateString = x -> x.length() > 3;
op.operate(validateString); // error
You can do it this way:
public class OperateAll<T> implements B<B<T>> {
private final Collection<? extends T> collection;
// Bounded wildcard is used here because this class only reads from collection
public OperateAll(Collection<? extends T> collection) {
this.collection = collection;
}
@Override
public boolean operate(B<T> t) {
return t != null && collection.stream().allMatch(t::operate);
}
}
Note: if you want, you can implement operate in an imperative way as well:
@Override
public boolean operate(B<T> t) {
if (t == null) return false;
for (T elm : collection)
if (!t.operate(elm)) return false;
return true;
}
Note 2: I suggest using Predicate instead of B.
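For illustration, here is the same class written against the standard java.util.function.Predicate instead of B (a sketch; the behavior matches the version above):
import java.util.Collection;
import java.util.function.Predicate;

public class OperateAll<T> implements Predicate<Predicate<T>> {
    private final Collection<? extends T> collection;

    public OperateAll(Collection<? extends T> collection) {
        this.collection = collection;
    }

    @Override
    public boolean test(Predicate<T> p) {
        // true only when every element of the collection satisfies p
        return p != null && collection.stream().allMatch(p);
    }
}
Usage then reads new OperateAll<>(collection).test(x -> x > 3), and a Predicate<String> is rejected at compile time just as before.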
I am creating a class CommonAggregator that can aggregate any object that is Aggregatable.
public interface Aggregatable<T> {
public void addFieldValue(T t, AggregationField<T> field);
}
public class Charges implements Aggregatable<Charges>{
// private fields
// getters and setters
@Override
public void addFieldValue(Charges charges, AggregationField<Charges> field) {
    if (ChargeDetailAggregationField.TYPE1 == field) {
        type1 += charges.getType1();
    } else if (ChargeDetailAggregationField.TYPE2 == field) {
        type2 += charges.getType2();
    } else if (ChargeDetailAggregationField.TYPE3 == field) {
        type3 += charges.getType3();
    }
}
}
public class CommonAggregator<T extends Aggregatable<T>> {
private static enum AggregationOperation {
SUM, MAX, MIN, AVG;
}
private AggregationField<T>[] fieldsForSum;
private AggregationField<T>[] fieldsForMax;
private AggregationField<T>[] fieldsForMin;
//private AggregationField groupByField = null;
public CommonAggregator<T> sum(AggregationField<T>... fields){
this.fieldsForSum = fields;
return this;
}
public CommonAggregator<T> max(AggregationField<T>... fields){
this.fieldsForMax = fields;
return this;
}
public CommonAggregator<T> min(AggregationField<T>... fields){
this.fieldsForMin = fields;
return this;
}
private <T> void performOperation(AggregationOperation op,AggregatedResponse<T> aggregatedDetails,List<T> aggregatables,AggregationField<T>... fields){
Aggregatable<T> aggregatedResponse = (Aggregatable<T>) getNewInstance();
T response = null;
for(AggregationField<T> field:fields){
if(op == AggregationOperation.MAX){
response = max(field,aggregatables);//Compilation Err
}else if(op == AggregationOperation.MIN){
response = min(field,aggregatables);//Compilation Err
}else if(op == AggregationOperation.SUM){
response = sum(field,aggregatables);//Compilation Err
}
aggregatedResponse.setFieldValue(response, field);
if(op == AggregationOperation.MAX){
aggregatedDetails.setMax(aggregatedResponse);
}else if(op == AggregationOperation.MIN){
aggregatedDetails.setMin(aggregatedResponse);
}else if(op == AggregationOperation.SUM){
aggregatedDetails.setSum(aggregatedResponse);
}
}
}
private T max(AggregationField<T> field,List<T> aggregatables){
CommonComparator<T> comparator = new CommonComparator<T>(SortOrder.ASCENDING, field);
return Collections.max(aggregatables, comparator);
}
private T min(AggregationField<T> field,List<T> aggregatables){
CommonComparator<T> comparator = new CommonComparator<T>(SortOrder.ASCENDING, field);
return Collections.min(aggregatables, comparator);
}
private T sum(AggregationField<T> field,List<T> listOfAggregatables){
T aggregatable = listOfAggregatables.get(0);
for(T response :listOfAggregatables.subList(1, listOfAggregatables.size())){
aggregatable.addFieldValue(response, field);
}
return aggregatable;
}
public AggregatedResponse<T> aggregate(List<T> aggregatables){
AggregatedResponse<T> aggregatedDetails = new AggregatedResponse<T>();
if(fieldsForMax != null)
performOperation(AggregationOperation.MAX,aggregatedDetails,aggregatables,fieldsForMax);
if(fieldsForMin != null)
performOperation(AggregationOperation.MIN,aggregatedDetails,aggregatables,fieldsForMin);
if(fieldsForSum != null)
performOperation(AggregationOperation.SUM,aggregatedDetails,aggregatables,fieldsForSum);
return aggregatedDetails;
}
public <E> Map<E,List<T>> groupBy(AggregationField<T> fieldName, List<T> listOfAggregatable){
Map<E,List<T>> groupedList = new HashMap<E,List<T>>();
for(T t:listOfAggregatable){
List<T> subList = null;
E fieldValue = (E)t.getFieldValue(fieldName);
if((subList = groupedList.get(fieldValue)) != null){
subList.add(t);
}else{
subList = new ArrayList<T>();
subList.add(t);
groupedList.put(fieldValue,subList);
}
}
return groupedList;
}
public <E> Map<E,AggregatedResponse<T>> groupByWithAggregation(AggregationField<T> fieldName, List<T> listOfAggregatable){
//groupByField = fieldName;
Map<E, List<T>> groupedByList = groupBy(fieldName, listOfAggregatable);
Map<E,AggregatedResponse<T>> mapOfAggregatedDetails = new HashMap<E, AggregatedResponse<T>>();
for(E key : groupedByList.keySet()){
mapOfAggregatedDetails.put(key, aggregate(groupedByList.get(key)));
}
return mapOfAggregatedDetails;
}
// ...
}
This is not the complete code.
Here, AggregationField tells which field of the Aggregatable class has to be aggregated.
Problem:
I am facing the following error when calling max(), min(), and sum() in performOperation():
The method max(AggregationField<T>, List<T>) in the type CommonAggregator<T> is not applicable for the arguments (AggregationField<T>, List<T>)
Edit: I have modified the original code and question after @Mikhail's suggestion.
I am not good at generics, and I guess I am doing something wrong with generics.
public interface AggregationField <T>
{
// ...
}
public interface Aggregatable <T>
{
public void addFieldValue (T t, AggregationField <T> field);
}
public class Charges implements Aggregatable <Charges>
{
@Override
public void addFieldValue (
Charges Charges, AggregationField <Charges> field)
{
// ...
}
}
public class CommonAggregator <T extends Aggregatable <T>> {
private T sum (
AggregationField <T> field,
List <? extends T> listOfAggregatables)
{
T aggregatable = listOfAggregatables.get(0);
for (T response: listOfAggregatables.subList (1, listOfAggregatables.size())){
aggregatable.addFieldValue(response, field);
}
return aggregatable;
}
}
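A note on the root cause: the quoted error message is the classic symptom of a shadowed type parameter. In the original code, performOperation declares its own <T> (private <T> void performOperation(...)), which hides the class-level T, so the two occurrences of AggregationField<T> in the error message refer to different Ts. A minimal sketch of the effect (hypothetical names):
import java.util.List;

class Aggregator<T> {
    T first(List<T> xs) {
        return xs.get(0);
    }

    // This <T> is a NEW type variable that shadows the class-level T.
    <T> void broken(List<T> xs) {
        // T t = first(xs); // does not compile: first() wants the class's T,
        //                  // but xs is a List of the method's unrelated T
    }

    // Dropping the redeclaration makes the call compile.
    void fixed(List<T> xs) {
        T t = first(xs); // OK
    }
}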
I have a method which looks like this:
void foo (List<String> list, ...) {
...
for (String s : list) { // this is the only place where `list` is used
...
}
...
}
The exact same code would work if I replaced List<String> list with String[] list. However, to avoid spaghetti code, I keep the single method, and when I need to call it on an array a, I do it like this: foo(Arrays.asList(a)).
I wonder if this is The Right Way.
Specifically,
What is the overhead of Arrays.asList()?
Is there a way to write a method which would accept both arrays and lists, just like the for loop does?
Thanks!
Arrays.asList() has a small overhead. There is no real way to implement a single method for both Lists and arrays.
But you can do the following:
void foo (List<String> list, ...) {
...
for (String s : list) { // this is the only place where *list* is used
...
}
...
}
void foo (String[] arr, ...) {
if ( arr != null ) {
foo(Arrays.asList(arr),...);
}
}
From the source code of OpenJDK, Arrays.asList (note that the ArrayList here is Arrays' private inner class, not java.util.ArrayList):
public static <T> List<T> asList(T... a) {
return new ArrayList<>(a);
}
furthermore:
ArrayList(E[] array) {
if (array==null)
throw new NullPointerException();
a = array;
}
So basically all that happens is an assignment, and the overhead should be negligible.
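A quick way to see that only an assignment happens: the returned list is a live view of the array (a small sketch):
import java.util.Arrays;
import java.util.List;

public class AsListView {
    public static void main(String[] args) {
        String[] a = { "x", "y" };
        List<String> view = Arrays.asList(a);
        a[0] = "z";                      // write through the array...
        System.out.println(view.get(0)); // ...prints "z": the list wraps the array
    }
}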
The overhead is that it converts an array to a list; how it does so is implementation-dependent, since it only needs to fulfill the contract.
IMO you should write two methods if you're concerned about the potential runtime overhead: that is the nature of Java; methods have type signatures, and they must be obeyed.
To avoid this I just use and allow Lists, Sets and Maps (as Joshua Bloch advises). There is no way to merge both "collection types".
An alternative is to use Guava (Iterators/Iterables), so you can iterate over your collections without a deep copy of them.
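For example (a sketch, assuming Guava is on the classpath; Iterators.forArray wraps the array without copying):
import com.google.common.collect.Iterators;
import java.util.Iterator;

public class GuavaArrayIteration {
    public static void main(String[] args) {
        String[] arr = { "a", "b", "c" };
        Iterator<String> it = Iterators.forArray(arr); // no copy of the array
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}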
Good question.
This is a very common case, and is often dealt with by writing two separate methods. However, code duplication is really a bad idea, and whenever you find yourself duplicating code, you should look for opportunities to factor it better. (As you are doing right now!)
Now if you look into the source of java.util.Arrays, you will notice that Arrays.asList returns an instance of a private inner class Arrays.ArrayList, which is just a thin wrapper over a plain array and delegates all relevant method calls to it. (This is known as a projection or view of a data structure.) Therefore the overhead incurred is insignificant (unless you are striving to extract every last bit of performance), and in my opinion you should go ahead and use this method without worrying about performance.
The solution I personally use is as follows.
I have a class named RichIterable in my personal utils. As the name indicates the class wraps over Iterable and provides some additional useful methods not already present. The class also has a factory method that creates an RichIterable from an array. Here is the class definition.
public class RichIterable<A> implements Iterable<A> {
private Iterable<A> xs;
private RichIterable(Iterable<A> xs) {
this.xs = xs;
}
public static <A> RichIterable<A> from(Iterable<A> xs) {
if (xs instanceof RichIterable) {
return (RichIterable<A>) xs;
} else {
return new RichIterable<A>(xs);
}
}
public static <A> RichIterable<A> from(final Enumeration<A> xs) {
Iterable<A> iterable = new Iterable<A>() {
@Override
public Iterator<A> iterator() {
return new Iterator<A>() {
@Override
public boolean hasNext() {
return xs.hasMoreElements();
}
@Override
public A next() {
return xs.nextElement();
}
@Override
public void remove() {
throw new UnsupportedOperationException(
"Cannot remove an element from an enumeration.");
}
};
}
};
return RichIterable.from(iterable);
}
public static <A> RichIterable<A> from(final A[] xs) {
Iterable<A> iterable = new Iterable<A>() {
@Override
public Iterator<A> iterator() {
return new Iterator<A>() {
private int i = 0;
@Override
public boolean hasNext() {
return i < xs.length;
}
@Override
public A next() {
A x = xs[i];
i++;
return x;
}
@Override
public void remove() {
throw new UnsupportedOperationException(
"Cannot remove an element from an array.");
}
};
}
};
return RichIterable.from(iterable);
}
public boolean isEmpty() {
if (xs instanceof Collection) {
return ((Collection) xs).isEmpty();
}
for (A x : xs) {
return false;
}
return true;
}
public int size() {
if (xs instanceof Collection) {
return ((Collection) xs).size();
}
int size = 0;
for (A x : xs) {
size++;
}
return size;
}
public ArrayList<A> toArrayList() {
ArrayList<A> ys = new ArrayList<A>();
for (A x : xs) {
ys.add(x);
}
return ys;
}
public <B> RichIterable<B> map(F1<A, B> f) {
List<B> ys = new ArrayList<B>();
for (A x : xs) {
ys.add(f.apply(x));
}
return RichIterable.from(ys);
}
public RichIterable<A> filter(F1<A, Boolean> pred) {
List<A> ys = new ArrayList<A>();
for (A x : xs) {
if (pred.apply(x)) {
ys.add(x);
}
}
return RichIterable.from(ys);
}
public boolean exists(F1<A, Boolean> pred) {
for (A x : xs) {
if (pred.apply(x)) {
return true;
}
}
return false;
}
public boolean forall(F1<A, Boolean> pred) {
for (A x : xs) {
if (!pred.apply(x)) {
return false;
}
}
return true;
}
public Maybe<A> find(F1<A, Boolean> pred) {
for (A x : xs) {
if (pred.apply(x)) {
return Just.of(x);
}
}
return Nothing.value();
}
public String mkString(String beg, String sep, String end) {
Iterator<A> i = xs.iterator();
if (!i.hasNext()) {
return beg + end;
}
StringBuilder sb = new StringBuilder();
sb.append(beg);
while (true) {
A e = i.next();
sb.append(e.toString());
if (!i.hasNext()) {
return sb.append(end).toString();
}
sb.append(sep);
}
}
public String mkString(String sep) {
return mkString("", sep, "");
}
public String mkString() {
return this.mkString(", ");
}
public Iterable<A> getRaw() {
return xs;
}
@Override
public Iterator<A> iterator() {
return xs.iterator();
}
}
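Usage then looks like this (a sketch; it assumes F1<A, B> is a functional interface with a single method B apply(A a), so lambdas and method references work on Java 8):
String[] words = { "alpha", "beta", "gamma" };
String joined = RichIterable.from(words)
        .filter(w -> w.length() > 4)  // keeps "alpha" and "gamma"
        .map(String::toUpperCase)
        .mkString("[", ", ", "]");    // "[ALPHA, GAMMA]"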