@Override
public IndexSample sampleIndex()
{
    return indexSamplers.parallelStream()
            .map( this::sampleIndex )
            .reduce( this::combine )
            .get();
}
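// The reduce step above relies on a combine( IndexSample, IndexSample ) helper that is not shown
// in this snippet. A minimal sketch of such a helper, assuming IndexSample exposes indexSize(),
// uniqueValues() and sampleSize() accessors whose per-partition values can simply be summed:
private IndexSample combine( IndexSample sample1, IndexSample sample2 )
{
    long indexSize = sample1.indexSize() + sample2.indexSize();
    long uniqueValues = sample1.uniqueValues() + sample2.uniqueValues();
    long sampleSize = sample1.sampleSize() + sample2.sampleSize();
    return new IndexSample( indexSize, uniqueValues, sampleSize );
}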
public static <R> CompletableFuture<List<R>> sequenceSuccessFuture(List<CompletableFuture<R>> futures) {
    return CompletableFuture.supplyAsync(() -> futures.parallelStream()
            .map(AsyncUtils::getValue)
            .filter(Objects::nonNull)
            .collect(Collectors.toList())
    );
}
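// AsyncUtils.getValue is assumed to unwrap a future's result and return null when the future did
// not complete successfully, which is what lets the Objects::nonNull filter above drop failures.
// A minimal sketch of such a helper (hypothetical, not the snippet's actual implementation):
static <R> R getValue(CompletableFuture<R> future) {
    try {
        return future.join();
    } catch (CompletionException | CancellationException e) {
        return null; // treat a failed or cancelled future as "no value"
    }
}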
/**
 * Return the candidate types that are associated with the specified stereotype.
 * @param basePackage the package to check for candidates
 * @param stereotype the stereotype to use
 * @return the candidate types associated with the specified {@code stereotype}
 * or an empty set if none has been found for the specified {@code basePackage}
 */
public Set<String> getCandidateTypes(String basePackage, String stereotype) {
    List<Entry> candidates = this.index.get(stereotype);
    if (candidates != null) {
        return candidates.parallelStream()
                .filter(t -> t.match(basePackage))
                .map(t -> t.type)
                .collect(Collectors.toSet());
    }
    return Collections.emptySet();
}
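// Illustrative call, assuming the index maps stereotype annotation names to candidate entries
// (the package and stereotype values below are made up for the example):
Set<String> candidates = getCandidateTypes("com.example.app", "org.springframework.stereotype.Component");
candidates.forEach(System.out::println);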
@Override
public IndexSampler createSampler()
{
    List<IndexSampler> indexSamplers = indexReaders.parallelStream()
            .map( SimpleIndexReader::createSampler )
            .collect( Collectors.toList() );
    return new AggregatingIndexSampler( indexSamplers );
}
@Override
public long countIndexedNodes( long nodeId, int[] propertyKeyIds, Value... propertyValues )
{
    return indexReaders.parallelStream()
            .mapToLong( reader -> reader.countIndexedNodes( nodeId, propertyKeyIds, propertyValues ) )
            .sum();
}
private PrimitiveLongResourceIterator partitionedOperation(
        Function<SimpleIndexReader,PrimitiveLongResourceIterator> readerFunction )
{
    return PrimitiveLongResourceCollections.concat( indexReaders.parallelStream()
            .map( readerFunction )
            .collect( Collectors.toList() ) );
}
}
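// partitionedOperation fans the same read out over every partition reader and concatenates the
// per-partition iterators into one. Illustrative call site (scanAll is a hypothetical reader
// method, shown only to demonstrate the shape of the readerFunction argument):
PrimitiveLongResourceIterator allMatches = partitionedOperation( SimpleIndexReader::scanAll );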
/**
 * Logical delete: checks whether {@code logicDeletePropertyName} matches the name of one of the fields.
 */
public boolean isLogicDelete(String logicDeletePropertyName) {
    return fields.parallelStream().anyMatch(tf -> tf.getName().equals(logicDeletePropertyName));
}
/**
 * Refresh all partitions to make newly inserted data visible for readers.
 *
 * @throws IOException if refreshing any of the partitions fails
 */
public void maybeRefreshBlocking() throws IOException
{
    try
    {
        getPartitions().parallelStream().forEach( this::maybeRefreshPartition );
    }
    catch ( UncheckedIOException e )
    {
        throw e.getCause();
    }
}
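// maybeRefreshPartition is expected to wrap any IOException in an UncheckedIOException so that it
// can escape the stream's forEach; the catch block above then unwraps and rethrows the original
// checked exception. A sketch of that convention (the partition type and its refresh method are
// assumptions, not taken from the snippet):
private void maybeRefreshPartition( AbstractIndexPartition partition )
{
    try
    {
        partition.maybeRefreshBlocking();
    }
    catch ( IOException e )
    {
        throw new UncheckedIOException( e );
    }
}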
@Test
public void index_with_valueFunction_parallel_stream() {
    Multimap<String, String> multimap = HUGE_LIST.parallelStream().collect(index(identity(), identity()));
    assertThat(multimap.keySet()).isEqualTo(HUGE_SET);
}
@Test
public void uniqueIndex_with_valueFunction_and_expected_size_parallel_stream() {
    Map<String, String> map = HUGE_LIST.parallelStream().collect(uniqueIndex(identity(), identity(), HUGE_LIST.size()));
    assertThat(map.keySet()).isEqualTo(HUGE_SET);
    assertThat(map.values()).containsExactlyElementsOf(HUGE_SET);
}
@Test
public void index_parallel_stream() {
    Multimap<String, String> multimap = HUGE_LIST.parallelStream().collect(index(identity()));
    assertThat(multimap.keySet()).isEqualTo(HUGE_SET);
}
@Test
public void uniqueIndex_with_valueFunction_parallel_stream() {
    Map<String, String> map = HUGE_LIST.parallelStream().collect(uniqueIndex(identity(), identity()));
    assertThat(map.keySet()).isEqualTo(HUGE_SET);
    assertThat(map.values()).containsExactlyElementsOf(HUGE_SET);
}
@Test
public void uniqueIndex_with_expected_size_parallel_stream() {
    Map<String, String> map = HUGE_LIST.parallelStream().collect(uniqueIndex(identity(), HUGE_LIST.size()));
    assertThat(map.keySet()).isEqualTo(HUGE_SET);
    assertThat(map.values()).containsExactlyElementsOf(HUGE_SET);
}
@Test
public void uniqueIndex_parallel_stream() {
    Map<String, String> map = HUGE_LIST.parallelStream().collect(uniqueIndex(identity()));
    assertThat(map.keySet()).isEqualTo(HUGE_SET);
    assertThat(map.values()).containsExactlyElementsOf(HUGE_SET);
}
@Test
public void toList_with_size_parallel_stream() {
    assertThat(HUGE_LIST.parallelStream().collect(toList(HUGE_LIST.size()))).isEqualTo(HUGE_LIST);
}
@Test
public void toArrayList_with_size_parallel_stream() {
    assertThat(HUGE_LIST.parallelStream().collect(toArrayList(HUGE_LIST.size()))).isEqualTo(HUGE_LIST);
}
@Test
public void toList_parallel_stream() {
    assertThat(HUGE_LIST.parallelStream().collect(toList())).isEqualTo(HUGE_LIST);
}
@Test
public void toArrayList_parallel_stream() {
    assertThat(HUGE_LIST.parallelStream().collect(toArrayList())).isEqualTo(HUGE_LIST);
}
@Test
public void join_does_not_support_parallel_stream_and_fails_with_ISE() {
    Stream<String> hugeStream = HUGE_LIST.parallelStream();
    expectedException.expect(IllegalStateException.class);
    expectedException.expectMessage("Parallel processing is not supported");
    hugeStream.collect(join(Joiner.on(" ")));
}
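// The join collector above presumably rejects parallel streams by throwing from its combiner,
// since a stream only invokes the combiner when it is processed in parallel. A minimal sketch of
// that technique (not the tested library's actual implementation):
static Collector<CharSequence, StringBuilder, String> joiningSequentialOnly(String separator) {
    return Collector.of(
            StringBuilder::new,
            (sb, cs) -> {
                if (sb.length() > 0) {
                    sb.append(separator);
                }
                sb.append(cs);
            },
            (left, right) -> {
                throw new IllegalStateException("Parallel processing is not supported");
            },
            StringBuilder::toString);
}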