LUCENE-7691 - add explicit size to arraylist creation if known #157

Open. Wants to merge 1 commit into master.
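The change is mechanical across all touched files: wherever the number of elements a list will hold is known before the loop that fills it, the `ArrayList` is constructed with that count as its initial capacity, so the backing array is allocated once instead of starting at the default capacity and being grown and copied as elements are added. A minimal sketch of the pattern (class and method names here are illustrative, not taken from the patch):

```java
import java.util.ArrayList;
import java.util.List;

class PresizedListSketch {

  // Before: the list starts at the default capacity and may have to grow
  // (allocate a larger array and copy) several times while the loop runs.
  static List<String> copyDefault(String[] items) {
    List<String> result = new ArrayList<>();
    for (String item : items) {
      result.add(item);
    }
    return result;
  }

  // After: the element count is known up front, so it is passed as the
  // initial capacity and the backing array is sized correctly from the start.
  static List<String> copyPresized(String[] items) {
    List<String> result = new ArrayList<>(items.length);
    for (String item : items) {
      result.add(item);
    }
    return result;
  }
}
```

The capacity argument is only a sizing hint for the backing array and does not change `List` semantics, so each rewrite should be behavior-preserving. A few hunks go slightly further, for example dropping a redundant second allocation or replacing a temporary list with a plain array when the final size is fixed.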
@@ -87,10 +87,9 @@ public void inform(ResourceLoader loader) throws IOException {
String dicts[] = dictionaryFiles.split(",");

InputStream affix = null;
- List<InputStream> dictionaries = new ArrayList<>();
+ List<InputStream> dictionaries = new ArrayList<>(dicts.length);

try {
- dictionaries = new ArrayList<>();
for (String file : dicts) {
dictionaries.add(loader.openResource(file));
}
@@ -89,7 +89,7 @@ public CapitalizationFilterFactory(Map<String, String> args) {

k = getSet(args, OK_PREFIX);
if (k != null) {
- okPrefix = new ArrayList<>();
+ okPrefix = new ArrayList<>(k.size());
for (String item : k) {
okPrefix.add(item.toCharArray());
}
@@ -438,7 +438,7 @@ private void bufferOutputTokens(BytesRef bytes, int matchInputLength) {
// TODO: we could encode this instead into the FST:

// 1st pass: count how many new nodes we need
- List<List<String>> paths = new ArrayList<>();
+ List<List<String>> paths = new ArrayList<>(count);
for(int outputIDX=0;outputIDX<count;outputIDX++) {
int wordID = bytesReader.readVInt();
synonyms.words.get(wordID, scratchBytes);
@@ -293,8 +293,9 @@ protected final List<String> splitFileNames(String fileNames) {
if (fileNames == null)
return Collections.<String>emptyList();

- List<String> result = new ArrayList<>();
- for (String file : fileNames.split("(?<!\\\\),")) {
+ String[] fileNamesArr = fileNames.split("(?<!\\\\),");
+ List<String> result = new ArrayList<>(fileNamesArr.length);
+ for (String file : fileNamesArr) {
result.add(file.replaceAll("\\\\(?=,)", ""));
}

@@ -68,8 +68,9 @@ public boolean accept(File dir, String name) {
return name.endsWith(".csv");
}
};
- ArrayList<File> csvFiles = new ArrayList<>();
- for (File file : new File(dirname).listFiles(filter)) {
+ File[] foundFiles = new File(dirname).listFiles(filter);
+ ArrayList<File> csvFiles = new ArrayList<>(foundFiles.length);
+ for (File file : foundFiles) {
csvFiles.add(file);
}
Collections.sort(csvFiles);
@@ -167,10 +167,10 @@ public int getToCount() {
public List<SegToken> getShortPath() {
int current;
int nodeCount = getToCount();
- List<PathNode> path = new ArrayList<>();
PathNode zeroPath = new PathNode();
zeroPath.weight = 0;
zeroPath.preNode = 0;
+ List<PathNode> path = new ArrayList<>(nodeCount + 1);
path.add(zeroPath);
for (current = 1; current <= nodeCount; current++) {
double weight;
@@ -78,7 +78,6 @@ public Gener() {}
@Override
public Trie optimize(Trie orig) {
List<CharSequence> cmds = orig.cmds;
- List<Row> rows = new ArrayList<>();
List<Row> orows = orig.rows;
int remap[] = new int[orows.size()];

@@ -90,7 +89,7 @@ public Trie optimize(Trie orig) {
}

Arrays.fill(remap, -1);
- rows = removeGaps(orig.root, orows, new ArrayList<Row>(), remap);
+ List<Row> rows = removeGaps(orig.root, orows, new ArrayList<Row>(), remap);

return new Trie(orig.forward, remap[orig.root], cmds, rows);
}
@@ -88,7 +88,6 @@ public Lift(boolean changeSkip) {
@Override
public Trie optimize(Trie orig) {
List<CharSequence> cmds = orig.cmds;
- List<Row> rows = new ArrayList<>();
List<Row> orows = orig.rows;
int remap[] = new int[orows.size()];

@@ -97,7 +96,7 @@ public Trie optimize(Trie orig) {
}

Arrays.fill(remap, -1);
- rows = removeGaps(orig.root, orows, new ArrayList<Row>(), remap);
+ List<Row> rows = removeGaps(orig.root, orows, new ArrayList<Row>(), remap);

return new Trie(orig.forward, remap[orig.root], cmds, rows);
}
@@ -186,7 +186,7 @@ public void add(CharSequence key, CharSequence cmd) {
*/
@Override
public Trie reduce(Reduce by) {
- List<Trie> h = new ArrayList<>();
+ List<Trie> h = new ArrayList<>(tries.size());
for (Trie trie : tries)
h.add(trie.reduce(by));

@@ -277,7 +277,7 @@ public CharSequence[] decompose(CharSequence cmd) {
*/
@Override
public Trie reduce(Reduce by) {
- List<Trie> h = new ArrayList<>();
+ List<Trie> h = new ArrayList<>(tries.size());
for (Trie trie : tries)
h.add(trie.reduce(by));

@@ -1531,7 +1531,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);
resources.add(Accountables.namedAccountable("term bytes", terms));
resources.add(Accountables.namedAccountable("term addresses", termAddresses));
return Collections.unmodifiableList(resources);
@@ -843,7 +843,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);
if (indexReader != null) {
resources.add(Accountables.namedAccountable("term index", indexReader));
}
@@ -263,7 +263,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);
if (termOffsets != null) {
resources.add(Accountables.namedAccountable("term lengths", termOffsets));
}
@@ -631,7 +631,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);
if (address != null) {
resources.add(Accountables.namedAccountable("addresses", RamUsageEstimator.sizeOf(address)));
}
@@ -774,7 +774,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);
if (addresses != null) {
resources.add(Accountables.namedAccountable("addresses", addresses));
}
@@ -476,7 +476,7 @@ public int nextDoc() throws IOException {
* an Iterable that merges ordinals and values and filters deleted documents .
*/
public void mergeSortedField(FieldInfo fieldInfo, final MergeState mergeState) throws IOException {
- List<SortedDocValues> toMerge = new ArrayList<>();
+ List<SortedDocValues> toMerge = new ArrayList<>(mergeState.docValuesProducers.length);
for (int i=0;i<mergeState.docValuesProducers.length;i++) {
SortedDocValues values = null;
DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i];
@@ -647,7 +647,7 @@ public String toString() {
*/
public void mergeSortedSetField(FieldInfo mergeFieldInfo, final MergeState mergeState) throws IOException {

- List<SortedSetDocValues> toMerge = new ArrayList<>();
+ List<SortedSetDocValues> toMerge = new ArrayList<>(mergeState.docValuesProducers.length);
for (int i=0;i<mergeState.docValuesProducers.length;i++) {
SortedSetDocValues values = null;
DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i];
@@ -84,8 +84,8 @@ protected FieldsConsumer() {
* Implementations can override this method for more sophisticated
* merging (bulk-byte copying, etc). */
public void merge(MergeState mergeState) throws IOException {
- final List<Fields> fields = new ArrayList<>();
- final List<ReaderSlice> slices = new ArrayList<>();
+ final List<Fields> fields = new ArrayList<>(mergeState.fieldsProducers.length);
+ final List<ReaderSlice> slices = new ArrayList<>(mergeState.fieldsProducers.length);

int docBase = 0;

@@ -110,7 +110,7 @@ public int nextDoc() {
* Implementations can override this method for more sophisticated
* merging (bulk-byte copying, etc). */
public int merge(MergeState mergeState) throws IOException {
- List<StoredFieldsMergeSub> subs = new ArrayList<>();
+ List<StoredFieldsMergeSub> subs = new ArrayList<>(mergeState.storedFieldsReaders.length);
for(int i=0;i<mergeState.storedFieldsReaders.length;i++) {
StoredFieldsReader storedFieldsReader = mergeState.storedFieldsReaders[i];
storedFieldsReader.checkIntegrity();
@@ -196,7 +196,7 @@ public int nextDoc() {
* merging (bulk-byte copying, etc). */
public int merge(MergeState mergeState) throws IOException {

- List<TermVectorsMergeSub> subs = new ArrayList<>();
+ List<TermVectorsMergeSub> subs = new ArrayList<>(mergeState.termVectorsReaders.length);
for(int i=0;i<mergeState.termVectorsReaders.length;i++) {
TermVectorsReader reader = mergeState.termVectorsReaders[i];
if (reader != null) {
@@ -191,7 +191,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(2);

long docBaseDeltaBytes = RamUsageEstimator.shallowSizeOf(docBasesDeltas);
for (PackedInts.Reader r : docBasesDeltas) {
@@ -502,7 +502,7 @@ public int merge(MergeState mergeState) throws IOException {
* If all readers are compressed and they have the same fieldinfos then we can merge the serialized document
* directly.
*/
- List<CompressingStoredFieldsMergeSub> subs = new ArrayList<>();
+ List<CompressingStoredFieldsMergeSub> subs = new ArrayList<>(mergeState.storedFieldsReaders.length);
for(int i=0;i<mergeState.storedFieldsReaders.length;i++) {
if (matching.matchingReaders[i] &&
mergeState.storedFieldsReaders[i] instanceof CompressingStoredFieldsReader) {
@@ -142,7 +142,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(readers.size());
for(Map.Entry<Integer,BKDReader> ent : readers.entrySet()) {
resources.add(Accountables.namedAccountable(readState.fieldInfos.fieldInfo(ent.getKey()).name,
ent.getValue()));
@@ -79,16 +79,16 @@ public Polygon[] parse() throws ParseException {
if (polyType.equals("Polygon")) {
return new Polygon[] {parsePolygon(coordinates)};
} else {
- List<Polygon> polygons = new ArrayList<>();
+ Polygon[] polygons = new Polygon[coordinates.size()];
for(int i=0;i<coordinates.size();i++) {
Object o = coordinates.get(i);
if (o instanceof List == false) {
throw newParseException("elements of coordinates array should be an array, but got: " + o.getClass());
}
- polygons.add(parsePolygon((List<Object>) o));
+ polygons[i] = (parsePolygon((List<Object>) o));
}

- return polygons.toArray(new Polygon[polygons.size()]);
+ return polygons;
}
}

@@ -217,11 +217,11 @@ private boolean isValidGeometryPath(String path) {
}

private Polygon parsePolygon(List<Object> coordinates) throws ParseException {
- List<Polygon> holes = new ArrayList<>();
Object o = coordinates.get(0);
if (o instanceof List == false) {
throw newParseException("first element of polygon array must be an array [[lat, lon], [lat, lon] ...] but got: " + o);
}
+ List<Polygon> holes = new ArrayList<>(coordinates.size());
double[][] polyPoints = parsePoints((List<Object>) o);
for(int i=1;i<coordinates.size();i++) {
o = coordinates.get(i);
@@ -54,15 +54,15 @@ void update(FrozenBufferedUpdates in) {
queries.put(query, BufferedUpdates.MAX_INT);
}

- List<DocValuesUpdate> numericPacket = new ArrayList<>();
+ List<DocValuesUpdate> numericPacket = new ArrayList<>(in.numericDVUpdates.length);
numericDVUpdates.add(numericPacket);
for (NumericDocValuesUpdate nu : in.numericDVUpdates) {
NumericDocValuesUpdate clone = new NumericDocValuesUpdate(nu.term, nu.field, (Long) nu.value);
clone.docIDUpto = Integer.MAX_VALUE;
numericPacket.add(clone);
}

- List<DocValuesUpdate> binaryPacket = new ArrayList<>();
+ List<DocValuesUpdate> binaryPacket = new ArrayList<>(in.binaryDVUpdates.length);
binaryDVUpdates.add(binaryPacket);
for (BinaryDocValuesUpdate bu : in.binaryDVUpdates) {
BinaryDocValuesUpdate clone = new BinaryDocValuesUpdate(bu.term, bu.field, (BytesRef) bu.value);
@@ -4320,7 +4320,7 @@ private int mergeMiddle(MergePolicy.OneMerge merge, MergePolicy mergePolicy) thr
// System.out.println("[" + Thread.currentThread().getName() + "] IW.mergeMiddle: merging " + merge.getMergeReaders());

// Let the merge wrap readers
- List<CodecReader> mergeReaders = new ArrayList<>();
+ List<CodecReader> mergeReaders = new ArrayList<>(merge.readers.size());
for (SegmentReader reader : merge.readers) {
mergeReaders.add(merge.wrapForMerge(reader));
}
@@ -913,7 +913,7 @@ public long ramBytesUsed() {

@Override
public Collection<Accountable> getChildResources() {
- List<Accountable> resources = new ArrayList<>();
+ List<Accountable> resources = new ArrayList<>(3);
resources.add(Accountables.namedAccountable("global ord deltas", globalOrdDeltas));
resources.add(Accountables.namedAccountable("first segments", firstSegments));
resources.add(Accountables.namedAccountable("segment map", segmentMap));
@@ -161,8 +161,8 @@ public int freq() throws IOException {
long minCost = Math.min(
requiredNoScoring.stream().mapToLong(ScorerSupplier::cost).min().orElse(Long.MAX_VALUE),
requiredScoring.stream().mapToLong(ScorerSupplier::cost).min().orElse(Long.MAX_VALUE));
- List<Scorer> requiredScorers = new ArrayList<>();
- List<Scorer> scoringScorers = new ArrayList<>();
+ List<Scorer> requiredScorers = new ArrayList<>(requiredNoScoring.size() + requiredScoring.size());
+ List<Scorer> scoringScorers = new ArrayList<>(requiredScoring.size());
for (ScorerSupplier s : requiredNoScoring) {
requiredScorers.add(s.get(randomAccess || s.cost() > minCost));
}
@@ -206,7 +206,7 @@ protected boolean lessThan(ScorerSupplier a, ScorerSupplier b) {
}
return new MinShouldMatchSumScorer(weight, optionalScorers, minShouldMatch);
} else {
- final List<Scorer> optionalScorers = new ArrayList<>();
+ final List<Scorer> optionalScorers = new ArrayList<>(optional.size());
for (ScorerSupplier scorer : optional) {
optionalScorers.add(scorer.get(randomAccess));
}
@@ -49,7 +49,7 @@ final class BooleanWeight extends Weight {
this.query = query;
this.needsScores = needsScores;
this.similarity = searcher.getSimilarity(needsScores);
- weights = new ArrayList<>();
+ weights = new ArrayList<>(query.clauses().size());
for (BooleanClause c : query) {
Weight w = searcher.createWeight(c.getQuery(), needsScores && c.isScoring(), boost);
weights.add(w);
@@ -66,7 +66,7 @@ public int freq() {

@Override
public Collection<ChildScorer> getChildren() {
- ArrayList<ChildScorer> children = new ArrayList<>();
+ ArrayList<ChildScorer> children = new ArrayList<>(scorers.length);
for (Scorer scorer : scorers) {
children.add(new ChildScorer(scorer, "MUST"));
}
@@ -179,7 +179,7 @@ public Query rewrite(IndexReader reader) throws IOException {
}

boolean actuallyRewritten = false;
- List<Query> rewrittenDisjuncts = new ArrayList<>();
+ List<Query> rewrittenDisjuncts = new ArrayList<>(disjuncts.length);
for (Query sub : disjuncts) {
Query rewrittenSub = sub.rewrite(reader);
actuallyRewritten |= rewrittenSub != sub;
@@ -195,7 +195,7 @@ public final float score() throws IOException {

@Override
public final Collection<ChildScorer> getChildren() {
- ArrayList<ChildScorer> children = new ArrayList<>();
+ ArrayList<ChildScorer> children = new ArrayList<>(subScorers.size());
for (DisiWrapper scorer : subScorers) {
children.add(new ChildScorer(scorer.scorer, "SHOULD"));
}
@@ -53,8 +53,8 @@ public PostingsAndPosition(PostingsEnum postings, int offset) {
this.docScorer = docScorer;
this.needsScores = needsScores;

- List<DocIdSetIterator> iterators = new ArrayList<>();
- List<PostingsAndPosition> postingsAndPositions = new ArrayList<>();
+ List<DocIdSetIterator> iterators = new ArrayList<>(postings.length);
+ List<PostingsAndPosition> postingsAndPositions = new ArrayList<>(postings.length);
for(PhraseQuery.PostingsAndFreq posting : postings) {
iterators.add(posting.postings);
postingsAndPositions.add(new PostingsAndPosition(posting.postings, posting.position));
@@ -604,7 +604,7 @@ public C call() throws Exception {
}));
}

- final List<C> collectedCollectors = new ArrayList<>();
+ final List<C> collectedCollectors = new ArrayList<>(topDocsFutures.size());
for (Future<C> future : topDocsFutures) {
try {
collectedCollectors.add(future.get());
@@ -115,7 +115,7 @@ protected boolean lessThan(Long a, Long b) {
addLead(new DisiWrapper(scorer));
}

- List<ChildScorer> children = new ArrayList<>();
+ List<ChildScorer> children = new ArrayList<>(scorers.size());
for (Scorer scorer : scorers) {
children.add(new ChildScorer(scorer, "SHOULD"));
}
@@ -246,7 +246,7 @@ public synchronized void prune(Pruner pruner) throws IOException {
// (not thread-safe since the values can change while
// ArrayList is init'ing itself); must instead iterate
// ourselves:
- final List<SearcherTracker> trackers = new ArrayList<>();
+ final List<SearcherTracker> trackers = new ArrayList<>(searchers.size());
for(SearcherTracker tracker : searchers.values()) {
trackers.add(tracker);
}