| language (stringclasses, 1 value) | repo (stringclasses, 60 values) | path (stringlengths 22-294) | class_span (dict) | source (stringlengths 13-1.16M) | target (stringlengths 1-113) |
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryEvaluator.java
|
{
"start": 1760,
"end": 2044
}
|
class ____ {@link LuceneQueryScoreEvaluator} for
* examples of subclasses that provide different types of scoring results for different ESQL constructs.
* It's much faster to push queries to the {@link LuceneSourceOperator} or the like, but sometimes this isn't possible. So
* this
|
and
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
|
{
"start": 1352,
"end": 9021
}
|
class ____ extends OutputStream implements
StreamCapabilities {
// data checksum
private final DataChecksum sum;
// internal buffer for storing data before it is checksummed
private byte buf[];
// internal buffer for storing checksum
private byte checksum[];
// The number of valid bytes in the buffer.
private int count;
// We want this value to be a multiple of 3 because the native code checksums
// 3 chunks simultaneously. The chosen value of 9 strikes a balance between
// limiting the number of JNI calls and flushing to the underlying stream
// relatively frequently.
private static final int BUFFER_NUM_CHUNKS = 9;
protected FSOutputSummer(DataChecksum sum) {
this.sum = sum;
this.buf = new byte[sum.getBytesPerChecksum() * BUFFER_NUM_CHUNKS];
this.checksum = new byte[getChecksumSize() * BUFFER_NUM_CHUNKS];
this.count = 0;
}
/* write the data chunk in <code>b</code> starting at <code>offset</code> with
* a length of <code>len > 0</code>, and its checksum
*/
protected abstract void writeChunk(byte[] b, int bOffset, int bLen,
byte[] checksum, int checksumOffset, int checksumLen) throws IOException;
/**
* Check if the implementing OutputStream is closed and should no longer
* accept writes. Implementations should do nothing if this stream is not
* closed, and should throw an {@link IOException} if it is closed.
*
* @throws IOException if this stream is already closed.
*/
protected abstract void checkClosed() throws IOException;
/** Write one byte */
@Override
public synchronized void write(int b) throws IOException {
buf[count++] = (byte)b;
if(count == buf.length) {
flushBuffer();
}
}
/**
* Writes <code>len</code> bytes from the specified byte array
* starting at offset <code>off</code> and generate a checksum for
* each data chunk.
*
* <p> This method stores bytes from the given array into this
* stream's buffer before it gets checksummed. The buffer gets checksummed
* and flushed to the underlying output stream when all data
* in a checksum chunk are in the buffer. If the buffer is empty and
* requested length is at least as large as the size of next checksum chunk
* size, this method will checksum and write the chunk directly
* to the underlying output stream. Thus it avoids unnecessary data copy.
*
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
* @exception IOException if an I/O error occurs.
*/
@Override
public synchronized void write(byte b[], int off, int len)
throws IOException {
checkClosed();
if (off < 0 || len < 0 || off > b.length - len) {
throw new ArrayIndexOutOfBoundsException();
}
for (int n=0;n<len;n+=write1(b, off+n, len-n)) {
}
}
/**
* Write a portion of an array, flushing to the underlying
* stream at most once if necessary.
*/
private int write1(byte b[], int off, int len) throws IOException {
if(count==0 && len>=buf.length) {
// local buffer is empty and user buffer size >= local buffer size, so
// simply checksum the user buffer and send it directly to the underlying
// stream
final int length = buf.length;
writeChecksumChunks(b, off, length);
return length;
}
// copy user data to local buffer
int bytesToCopy = buf.length-count;
bytesToCopy = (len<bytesToCopy) ? len : bytesToCopy;
System.arraycopy(b, off, buf, count, bytesToCopy);
count += bytesToCopy;
if (count == buf.length) {
// local buffer is full
flushBuffer();
}
return bytesToCopy;
}
/* Forces any buffered output bytes to be checksummed and written out to
* the underlying output stream.
*/
protected synchronized void flushBuffer() throws IOException {
flushBuffer(false, true);
}
/* Forces buffered output bytes to be checksummed and written out to
* the underlying output stream. If there is a trailing partial chunk in the
* buffer,
* 1) flushPartial tells us whether to flush that chunk
* 2) if flushPartial is true, keep tells us whether to keep that chunk in the
* buffer (if flushPartial is false, it is always kept in the buffer)
*
* Returns the number of bytes that were flushed but are still left in the
* buffer (can only be non-zero if keep is true).
*/
protected synchronized int flushBuffer(boolean keep,
boolean flushPartial) throws IOException {
int bufLen = count;
int partialLen = bufLen % sum.getBytesPerChecksum();
int lenToFlush = flushPartial ? bufLen : bufLen - partialLen;
if (lenToFlush != 0) {
writeChecksumChunks(buf, 0, lenToFlush);
if (!flushPartial || keep) {
count = partialLen;
System.arraycopy(buf, bufLen - count, buf, 0, count);
} else {
count = 0;
}
}
// total bytes left minus unflushed bytes left
return count - (bufLen - lenToFlush);
}
/**
* Checksums all complete data chunks and flushes them to the underlying
* stream. If there is a trailing partial chunk, it is not flushed and is
* maintained in the buffer.
*/
public void flush() throws IOException {
flushBuffer(false, false);
}
/**
* Return the number of valid bytes currently in the buffer.
*
* @return buffer data size.
*/
protected synchronized int getBufferedDataSize() {
return count;
}
/** @return the size for a checksum. */
protected int getChecksumSize() {
return sum.getChecksumSize();
}
protected DataChecksum getDataChecksum() {
return sum;
}
protected TraceScope createWriteTraceScope() {
return null;
}
/** Generate checksums for the given data chunks and output chunks & checksums
* to the underlying output stream.
*/
private void writeChecksumChunks(byte b[], int off, int len)
throws IOException {
sum.calculateChunkedSums(b, off, len, checksum, 0);
TraceScope scope = createWriteTraceScope();
try {
for (int i = 0; i < len; i += sum.getBytesPerChecksum()) {
int chunkLen = Math.min(sum.getBytesPerChecksum(), len - i);
int ckOffset = i / sum.getBytesPerChecksum() * getChecksumSize();
writeChunk(b, off + i, chunkLen, checksum, ckOffset,
getChecksumSize());
}
} finally {
if (scope != null) {
scope.close();
}
}
}
/**
* Converts a checksum integer value to a byte stream
*
* @param sum check sum.
* @param checksumSize check sum size.
* @return byte stream.
*/
static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
return int2byte((int)sum.getValue(), new byte[checksumSize]);
}
static byte[] int2byte(int integer, byte[] bytes) {
if (bytes.length != 0) {
bytes[0] = (byte) ((integer >>> 24) & 0xFF);
bytes[1] = (byte) ((integer >>> 16) & 0xFF);
bytes[2] = (byte) ((integer >>> 8) & 0xFF);
bytes[3] = (byte) ((integer >>> 0) & 0xFF);
return bytes;
}
return bytes;
}
/**
* Resets existing buffer with a new one of the specified size.
*
* @param size size.
*/
protected synchronized void setChecksumBufSize(int size) {
this.buf = new byte[size];
this.checksum = new byte[sum.getChecksumSize(size)];
this.count = 0;
}
protected synchronized void resetChecksumBufSize() {
setChecksumBufSize(sum.getBytesPerChecksum() * BUFFER_NUM_CHUNKS);
}
@Override
public boolean hasCapability(String capability) {
return false;
}
}
|
FSOutputSummer
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/support/converter/SmartMessageConverter.java
|
{
"start": 1147,
"end": 2018
}
|
interface ____ extends MessageConverter {
/**
* A variant of {@link #toMessage(Object, Session)} which takes an extra conversion
* context as an argument, allowing, for example, annotations on a payload parameter
* to be taken into account.
* @param object the object to convert
* @param session the Session to use for creating a JMS Message
* @param conversionHint an extra object passed to the {@link MessageConverter},
* for example, the associated {@code MethodParameter} (may be {@code null})
* @return the JMS Message
* @throws jakarta.jms.JMSException if thrown by JMS API methods
* @throws MessageConversionException in case of conversion failure
* @see #toMessage(Object, Session)
*/
Message toMessage(Object object, Session session, @Nullable Object conversionHint)
throws JMSException, MessageConversionException;
}
|
SmartMessageConverter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/GetMigrationReindexStatusAction.java
|
{
"start": 2979,
"end": 4422
}
|
class ____ extends LegacyActionRequest implements IndicesRequest {
private final String index;
public Request(String index) {
super();
this.index = index;
}
public Request(StreamInput in) throws IOException {
super(in);
this.index = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(index);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
public String getIndex() {
return index;
}
@Override
public int hashCode() {
return Objects.hashCode(index);
}
@Override
public boolean equals(Object other) {
return other instanceof Request && index.equals(((Request) other).index);
}
public Request nodeRequest(String thisNodeId, long thisTaskId) {
Request copy = new Request(index);
copy.setParentTask(thisNodeId, thisTaskId);
return copy;
}
@Override
public String[] indices() {
return new String[] { index };
}
@Override
public IndicesOptions indicesOptions() {
return IndicesOptions.strictSingleIndexNoExpandForbidClosed();
}
}
}
|
Request
|
java
|
grpc__grpc-java
|
examples/src/main/java/io/grpc/examples/errorhandling/DetailErrorSample.java
|
{
"start": 2204,
"end": 7689
}
|
class ____ {
private static final Metadata.Key<DebugInfo> DEBUG_INFO_TRAILER_KEY =
ProtoUtils.keyForProto(DebugInfo.getDefaultInstance());
private static final DebugInfo DEBUG_INFO =
DebugInfo.newBuilder()
.addStackEntries("stack_entry_1")
.addStackEntries("stack_entry_2")
.addStackEntries("stack_entry_3")
.setDetail("detailed error info.").build();
private static final String DEBUG_DESC = "detailed error description";
public static void main(String[] args) throws Exception {
new DetailErrorSample().run();
}
private ManagedChannel channel;
void run() throws Exception {
Server server = Grpc.newServerBuilderForPort(0, InsecureServerCredentials.create())
.addService(new GreeterGrpc.GreeterImplBase() {
@Override
public void sayHello(HelloRequest request, StreamObserver<HelloReply> responseObserver) {
Metadata trailers = new Metadata();
trailers.put(DEBUG_INFO_TRAILER_KEY, DEBUG_INFO);
responseObserver.onError(Status.INTERNAL.withDescription(DEBUG_DESC)
.asRuntimeException(trailers));
}
}).build().start();
channel = Grpc.newChannelBuilderForAddress(
"localhost", server.getPort(), InsecureChannelCredentials.create()).build();
blockingCall();
futureCallDirect();
futureCallCallback();
asyncCall();
advancedAsyncCall();
channel.shutdown();
server.shutdown();
channel.awaitTermination(1, TimeUnit.SECONDS);
server.awaitTermination();
}
static void verifyErrorReply(Throwable t) {
Status status = Status.fromThrowable(t);
Metadata trailers = Status.trailersFromThrowable(t);
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(trailers.containsKey(DEBUG_INFO_TRAILER_KEY));
Verify.verify(status.getDescription().equals(DEBUG_DESC));
try {
Verify.verify(trailers.get(DEBUG_INFO_TRAILER_KEY).equals(DEBUG_INFO));
} catch (IllegalArgumentException e) {
throw new VerifyException(e);
}
}
void blockingCall() {
GreeterBlockingStub stub = GreeterGrpc.newBlockingStub(channel);
try {
stub.sayHello(HelloRequest.newBuilder().build());
} catch (Exception e) {
verifyErrorReply(e);
}
}
void futureCallDirect() {
GreeterFutureStub stub = GreeterGrpc.newFutureStub(channel);
ListenableFuture<HelloReply> response =
stub.sayHello(HelloRequest.newBuilder().build());
try {
response.get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} catch (ExecutionException e) {
verifyErrorReply(e.getCause());
}
}
void futureCallCallback() {
GreeterFutureStub stub = GreeterGrpc.newFutureStub(channel);
ListenableFuture<HelloReply> response =
stub.sayHello(HelloRequest.newBuilder().build());
final CountDownLatch latch = new CountDownLatch(1);
Futures.addCallback(
response,
new FutureCallback<HelloReply>() {
@Override
public void onSuccess(@Nullable HelloReply result) {
// Won't be called, since the server in this example always fails.
}
@Override
public void onFailure(Throwable t) {
verifyErrorReply(t);
latch.countDown();
}
},
directExecutor());
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
void asyncCall() {
GreeterStub stub = GreeterGrpc.newStub(channel);
HelloRequest request = HelloRequest.newBuilder().build();
final CountDownLatch latch = new CountDownLatch(1);
StreamObserver<HelloReply> responseObserver = new StreamObserver<HelloReply>() {
@Override
public void onNext(HelloReply value) {
// Won't be called.
}
@Override
public void onError(Throwable t) {
verifyErrorReply(t);
latch.countDown();
}
@Override
public void onCompleted() {
// Won't be called, since the server in this example always fails.
}
};
stub.sayHello(request, responseObserver);
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
/**
* This is more advanced and does not make use of the stub. You should not normally need to do
* this, but here is how you would.
*/
void advancedAsyncCall() {
ClientCall<HelloRequest, HelloReply> call =
channel.newCall(GreeterGrpc.getSayHelloMethod(), CallOptions.DEFAULT);
final CountDownLatch latch = new CountDownLatch(1);
call.start(new ClientCall.Listener<HelloReply>() {
@Override
public void onClose(Status status, Metadata trailers) {
Verify.verify(status.getCode() == Status.Code.INTERNAL);
Verify.verify(trailers.containsKey(DEBUG_INFO_TRAILER_KEY));
try {
Verify.verify(trailers.get(DEBUG_INFO_TRAILER_KEY).equals(DEBUG_INFO));
} catch (IllegalArgumentException e) {
throw new VerifyException(e);
}
latch.countDown();
}
}, new Metadata());
call.sendMessage(HelloRequest.newBuilder().build());
call.halfClose();
if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.SECONDS)) {
throw new RuntimeException("timeout!");
}
}
}
|
DetailErrorSample
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java
|
{
"start": 5345,
"end": 12851
}
|
class ____ implements Suppressible, Serializable {
private final BugCheckerInfo info;
private final BiPredicate<Set<? extends Name>, VisitorState> checkSuppression;
public BugChecker() {
info = BugCheckerInfo.create(getClass());
checkSuppression = suppressionPredicate(info.customSuppressionAnnotations());
}
private static BiPredicate<Set<? extends Name>, VisitorState> suppressionPredicate(
Set<Class<? extends Annotation>> suppressionClasses) {
return switch (suppressionClasses.size()) {
case 0 -> (annos, state) -> false;
case 1 -> {
Supplier<Name> self =
VisitorState.memoize(
state -> state.getName(Iterables.getOnlyElement(suppressionClasses).getName()));
yield (annos, state) -> annos.contains(self.get(state));
}
default -> {
Supplier<Set<? extends Name>> self =
VisitorState.memoize(
state ->
suppressionClasses.stream()
.map(Class::getName)
.map(state::getName)
.collect(toImmutableSet()));
yield (annos, state) -> !Collections.disjoint(self.get(state), annos);
}
};
}
/** Helper to create a Description for the common case where there is a fix. */
@CheckReturnValue
public Description describeMatch(Tree node, Fix fix) {
return buildDescription(node).addFix(fix).build();
}
/** Helper to create a Description for the common case where there is a fix. */
@CheckReturnValue
public Description describeMatch(JCTree node, Fix fix) {
return describeMatch((Tree) node, fix);
}
/** Helper to create a Description for the common case where there is a fix. */
@CheckReturnValue
public Description describeMatch(DiagnosticPosition position, Fix fix) {
return buildDescription(position).addFix(fix).build();
}
/** Helper to create a Description for the common case where there is no fix. */
@CheckReturnValue
public Description describeMatch(Tree node) {
return buildDescription(node).build();
}
/** Helper to create a Description for the common case where there is no fix. */
@CheckReturnValue
public Description describeMatch(JCTree node) {
return buildDescription(node).build();
}
/** Helper to create a Description for the common case where there is no fix. */
@CheckReturnValue
public Description describeMatch(DiagnosticPosition position) {
return buildDescription(position).build();
}
/**
* Returns a Description builder, which allows you to customize the diagnostic with a custom
* message or multiple fixes.
*/
@CheckReturnValue
public Description.Builder buildDescription(Tree node) {
return Description.builder(node, canonicalName(), linkUrl(), message());
}
/**
* Returns a Description builder, which allows you to customize the diagnostic with a custom
* message or multiple fixes.
*/
@CheckReturnValue
public Description.Builder buildDescription(DiagnosticPosition position) {
return Description.builder(position, canonicalName(), linkUrl(), message());
}
/**
* Returns a Description builder, which allows you to customize the diagnostic with a custom
* message or multiple fixes.
*/
// This overload exists purely to disambiguate for JCTree.
@CheckReturnValue
public Description.Builder buildDescription(JCTree tree) {
return Description.builder(tree, canonicalName(), linkUrl(), message());
}
@Override
public String canonicalName() {
return info.canonicalName();
}
@Override
public Set<String> allNames() {
return info.allNames();
}
public String message() {
return info.message();
}
public SeverityLevel defaultSeverity() {
return info.defaultSeverity();
}
public String linkUrl() {
return info.linkUrl();
}
@Override
public boolean supportsSuppressWarnings() {
return info.supportsSuppressWarnings();
}
public boolean disableable() {
return info.disableable();
}
@Override
public Set<Class<? extends Annotation>> customSuppressionAnnotations() {
return info.customSuppressionAnnotations();
}
@Override
public boolean suppressedByAnyOf(Set<Name> annotations, VisitorState s) {
return checkSuppression.test(annotations, s);
}
/**
* Returns true if the given tree is annotated with a {@code @SuppressWarnings} that disables this
* bug checker.
*/
public boolean isSuppressed(Tree tree, VisitorState state) {
Symbol sym = getDeclaredSymbol(tree);
/*
* TODO(cpovirk): At least for @SuppressWarnings, should our suppression checks look for
* annotations only on the kinds of trees that are covered by SuppressibleTreePathScanner? Or,
* now that @SuppressWarnings has been changed to be applicable to all declaration locations,
* should we generalize SuppressibleTreePathScanner to look on all those locations?
*/
return sym != null && isSuppressed(sym, state);
}
/**
* Returns true if the given symbol is annotated with a {@code @SuppressWarnings} or other
* annotation that disables this bug checker.
*/
/*
* TODO(cpovirk): Would we consider deleting this overload (or at least making it `private`)? Its
* callers appear to all have access to a Tree, and callers might accidentally pass
* getSymbol(tree) instead of getDeclaredSymbol(tree), resulting in over-suppression. Fortunately,
* the Tree probably provides all that we need, at least for looking for @SuppressWarnings. It
* does *not* provide all that we need for looking for any @Inherited suppression annotations (if
* such annotations are something that we (a) support and (b) want to support), but we can always
* call getDeclaredSymbol inside the implementation where necessary.
*/
public boolean isSuppressed(Symbol sym, VisitorState state) {
ErrorProneOptions errorProneOptions = state.errorProneOptions();
boolean suppressedInGeneratedCode =
errorProneOptions.disableWarningsInGeneratedCode()
&& state.severityMap().get(canonicalName()) != SeverityLevel.ERROR;
SuppressionInfo.SuppressedState suppressedState =
SuppressionInfo.EMPTY
.withExtendedSuppressions(sym, state, customSuppressionAnnotationNames.get(state))
.suppressedState(BugChecker.this, suppressedInGeneratedCode, state);
return suppressedState == SuppressionInfo.SuppressedState.SUPPRESSED;
}
private final Supplier<? extends Set<? extends Name>> customSuppressionAnnotationNames =
VisitorState.memoize(
state ->
customSuppressionAnnotations().stream()
.map(a -> state.getName(a.getName()))
.collect(toImmutableSet()));
/** Computes a RangeSet of code regions which are suppressed by this bug checker. */
public ImmutableRangeSet<Integer> suppressedRegions(VisitorState state) {
TreeRangeSet<Integer> suppressedRegions = TreeRangeSet.create();
new TreeScanner<Void, Void>() {
@Override
public Void scan(Tree tree, Void unused) {
if (getModifiers(tree) != null && isSuppressed(tree, state)) {
suppressedRegions.add(Range.closed(getStartPosition(tree), state.getEndPosition(tree)));
} else {
super.scan(tree, null);
}
return null;
}
}.scan(state.getPath().getCompilationUnit(), null);
return ImmutableRangeSet.copyOf(suppressedRegions);
}
public
|
BugChecker
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/PathParamOverlapTest.java
|
{
"start": 645,
"end": 2526
}
|
class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestResource.class);
}
});
@Test
public void test() {
get("/hello/some/test")
.then()
.statusCode(200)
.body(equalTo("Hello World!"));
get("/hello/other/test/new")
.then()
.statusCode(200)
.body(equalTo("Hello other"));
get("/hello/some/test/new")
.then()
.statusCode(200)
.body(equalTo("Hello some"));
get("/hello/some/test/wrong")
.then()
.statusCode(404);
get("/hello/other/test/wrong")
.then()
.statusCode(404);
get("/hello/foo")
.then()
.statusCode(404);
get("/hello/foo/value")
.then()
.statusCode(200)
.body(equalTo("Foo value"));
get("/hello/foo/bar")
.then()
.statusCode(200)
.body(equalTo("Foo bar"));
get("/hello/foo/bar/value")
.then()
.statusCode(200)
.body(equalTo("FooBar value"));
get("/hello/foo/bah_value")
.then()
.statusCode(200)
.body(equalTo("Foo bah_value"));
get("/hello/foo/bar_value")
.then()
.statusCode(200)
.body(equalTo("Foo bar_value"));
}
@Path("/hello")
public static
|
PathParamOverlapTest
|
java
|
apache__camel
|
components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/SharedQueueSimpleMessageListenerContainer.java
|
{
"start": 1528,
"end": 2617
}
|
class ____ extends SimpleJmsMessageListenerContainer {
private String fixedMessageSelector;
private MessageSelectorCreator creator;
/**
* Use a fixed JMS message selector
*
* @param endpoint the endpoint
* @param fixedMessageSelector the fixed selector
*/
public SharedQueueSimpleMessageListenerContainer(JmsEndpoint endpoint, String fixedMessageSelector) {
super(endpoint);
this.fixedMessageSelector = fixedMessageSelector;
}
/**
* Use a dynamic JMS message selector
*
* @param endpoint the endpoint
* @param creator the creator to create the dynamic selector
*/
public SharedQueueSimpleMessageListenerContainer(JmsEndpoint endpoint, MessageSelectorCreator creator) {
super(endpoint);
this.creator = creator;
}
// override this method and return the appropriate selector
@Override
public String getMessageSelector() {
return JmsReplyHelper.getMessageSelector(fixedMessageSelector, creator);
}
}
|
SharedQueueSimpleMessageListenerContainer
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java
|
{
"start": 68099,
"end": 68182
}
|
class ____ {
public boolean method(Object o) {
return false;
}
}
static
|
CCC
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/descriptor/LifecycleMethodUtilsTests.java
|
{
"start": 1655,
"end": 2646
}
|
class ____ {
List<DiscoveryIssue> discoveryIssues = new ArrayList<>();
DiscoveryIssueReporter issueReporter = DiscoveryIssueReporter.collecting(discoveryIssues);
@Test
void findNonVoidBeforeAllMethodsWithStandardLifecycle() throws Exception {
var methods = findBeforeAllMethods(TestCaseWithInvalidLifecycleMethods.class, true, issueReporter);
assertThat(methods).isEmpty();
var methodSource = MethodSource.from(TestCaseWithInvalidLifecycleMethods.class.getDeclaredMethod("cc"));
var notVoidIssue = DiscoveryIssue.builder(Severity.ERROR,
"@BeforeAll method 'private java.lang.Double org.junit.jupiter.engine.descriptor.TestCaseWithInvalidLifecycleMethods.cc()' must not return a value.") //
.source(methodSource) //
.build();
var notStaticIssue = DiscoveryIssue.builder(Severity.ERROR,
"@BeforeAll method 'private java.lang.Double org.junit.jupiter.engine.descriptor.TestCaseWithInvalidLifecycleMethods.cc()' must be static unless the test
|
LifecycleMethodUtilsTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/NoCanIgnoreReturnValueOnClassesTest.java
|
{
"start": 12766,
"end": 13112
}
|
class ____ {
abstract String id();
}
}
""")
.addOutputLines(
"Outer.java",
"package com.google.frobber;",
"import com.google.auto.value.AutoValue;",
"import com.google.errorprone.annotations.CanIgnoreReturnValue;",
"public
|
Inner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/SampleExecSerializationTests.java
|
{
"start": 520,
"end": 1801
}
|
class ____ extends AbstractPhysicalPlanSerializationTests<SampleExec> {
/**
* Creates a random test instance to use in the tests. This method will be
* called multiple times during test execution and should return a different
* random instance each time it is called.
*/
@Override
protected SampleExec createTestInstance() {
return new SampleExec(randomSource(), randomChild(0), randomProbability());
}
/**
* Returns an instance which is mutated slightly so it should not be equal
* to the given instance.
*
* @param instance
*/
@Override
protected SampleExec mutateInstance(SampleExec instance) throws IOException {
var probability = instance.probability();
var child = instance.child();
int updateSelector = randomIntBetween(0, 1);
switch (updateSelector) {
case 0 -> probability = randomValueOtherThan(probability, SampleSerializationTests::randomProbability);
case 1 -> child = randomValueOtherThan(child, () -> randomChild(0));
default -> throw new IllegalArgumentException("Invalid selector: " + updateSelector);
}
return new SampleExec(instance.source(), child, probability);
}
}
|
SampleExecSerializationTests
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/http/OAuth2ClientBeanDefinitionParserTests.java
|
{
"start": 12870,
"end": 13151
}
|
class ____ {
@GetMapping("/authorized-client")
String authorizedClient(Model model,
@RegisteredOAuth2AuthorizedClient("google") OAuth2AuthorizedClient authorizedClient) {
return (authorizedClient != null) ? "resolved" : "not-resolved";
}
}
}
|
AuthorizedClientController
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManyListBidirectionalTest.java
|
{
"start": 2477,
"end": 3313
}
|
class ____ {
@Id
private int id;
public Book() {
}
public Book(int id) {
this.id = id;
}
public Book(int id, Author author) {
this.id = id;
link( author );
}
private void link(Author author) {
authors.add( author );
author.books.add( this );
}
public Book(int id, Author... authors) {
this.id = id;
ArrayHelper.forEach( authors, this::link );
}
@ManyToMany
@JoinTable(name = "book_author",
joinColumns = { @JoinColumn(name = "fk_book") },
inverseJoinColumns = { @JoinColumn(name = "fk_author") })
private List<Author> authors = new ArrayList<>();
public void addAuthor(Author author) {
link( author );
}
@Override
public String toString() {
return "Book(" + id + ")@" + Integer.toHexString( hashCode() );
}
}
@Entity(name = "Author")
public static
|
Book
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/runtime/EndpointScoresSupplier.java
|
{
"start": 189,
"end": 527
}
|
class ____ implements Supplier<ScoreSystem.EndpointScores> {
@Override
public ScoreSystem.EndpointScores get() {
var result = ScoreSystem.latestScores;
if (result != null) {
return result;
}
return new ScoreSystem.EndpointScores(0, Collections.emptyList());
}
}
|
EndpointScoresSupplier
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/broadcast/BroadcastOnMessageTest.java
|
{
"start": 736,
"end": 4077
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Up.class, UpBlocking.class, UpMultiBidi.class);
});
@TestHTTPResource("up")
URI upUri;
@TestHTTPResource("up-blocking")
URI upBlockingUri;
@TestHTTPResource("up-multi-bidi")
URI upMultiBidiUri;
@Inject
Vertx vertx;
@Test
public void testUp() throws Exception {
assertBroadcast(upUri);
}
@Test
public void testUpBlocking() throws Exception {
assertBroadcast(upBlockingUri);
}
@Test
public void testUpMultiBidi() throws Exception {
assertBroadcast(upMultiBidiUri);
}
public void assertBroadcast(URI testUri) throws Exception {
WebSocketClient client1 = vertx.createWebSocketClient();
WebSocketClient client2 = vertx.createWebSocketClient();
try {
CountDownLatch connectedLatch = new CountDownLatch(2);
CountDownLatch messagesLatch = new CountDownLatch(2);
AtomicReference<WebSocket> ws1 = new AtomicReference<>();
List<String> messages = new CopyOnWriteArrayList<>();
client1
.connect(testUri.getPort(), testUri.getHost(), testUri.getPath() + "/1")
.onComplete(r -> {
if (r.succeeded()) {
WebSocket ws = r.result();
ws.textMessageHandler(msg -> {
messages.add(msg);
messagesLatch.countDown();
});
// We will use this socket to write a message later on
ws1.set(ws);
connectedLatch.countDown();
} else {
throw new IllegalStateException(r.cause());
}
});
client2
.connect(testUri.getPort(), testUri.getHost(), testUri.getPath() + "/2")
.onComplete(r -> {
if (r.succeeded()) {
WebSocket ws = r.result();
ws.textMessageHandler(msg -> {
messages.add(msg);
messagesLatch.countDown();
});
connectedLatch.countDown();
} else {
throw new IllegalStateException(r.cause());
}
});
assertTrue(connectedLatch.await(5, TimeUnit.SECONDS));
ws1.get().writeTextMessage("hello");
assertTrue(messagesLatch.await(5, TimeUnit.SECONDS), "Messages: " + messages);
assertEquals(2, messages.size(), "Messages: " + messages);
// Both messages come from the first client
assertEquals("1:HELLO", messages.get(0));
assertEquals("1:HELLO", messages.get(1));
} finally {
client1.close().toCompletionStage().toCompletableFuture().get();
client2.close().toCompletionStage().toCompletableFuture().get();
}
}
}
|
BroadcastOnMessageTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/subresource/ParameterizedParentInterfaceTest.java
|
{
"start": 4061,
"end": 4153
}
|
interface ____<S> {
@GET
@Path("something")
S get();
}
public
|
Z
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLShowACLStatement.java
|
{
"start": 864,
"end": 1615
}
|
class ____ extends SQLStatementImpl implements SQLShowStatement, SQLReplaceable {
protected SQLExprTableSource table;
public SQLExprTableSource getTable() {
return table;
}
public void setTable(SQLExprTableSource x) {
if (x != null) {
x.setParent(this);
}
this.table = x;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (table != null) {
table.accept(visitor);
}
}
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (table != null) {
return table.replace(expr, target);
}
return false;
}
}
|
SQLShowACLStatement
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java
|
{
"start": 29748,
"end": 36033
}
|
class ____ extends BaseExpression implements Expression {
private Expression expr;
private FileStatusChecker checker;
public TestExpression(Expression expr, FileStatusChecker checker) {
this.expr = expr;
this.checker = checker;
}
@Override
public Result apply(PathData item, int depth) throws IOException {
FileStatus fileStatus = getFileStatus(item, depth);
checker.check(fileStatus);
return expr.apply(item, depth);
}
@Override
public void setOptions(FindOptions options) throws IOException {
super.setOptions(options);
expr.setOptions(options);
}
@Override
public void prepare() throws IOException {
expr.prepare();
}
@Override
public void finish() throws IOException {
expr.finish();
}
}
// creates a directory structure for traversal
// item1 (directory)
// \- item1a (directory)
// \- item1aa (file)
// \- item1b (file)
// item2 (directory)
// item3 (file)
// item4 (link) -> item3
// item5 (directory)
// \- item5a (link) -> item1b
// \- item5b (link) -> item5 (infinite loop)
// \- item5c (directory)
// \- item5ca (file)
// \- item5d (link) -> item5c
// \- item5e (link) -> item5c/item5ca
private PathData item1 = null;
private PathData item1a = null;
private PathData item1aa = null;
private PathData item1b = null;
private PathData item2 = null;
private PathData item3 = null;
private PathData item4 = null;
private PathData item5 = null;
private PathData item5a = null;
private PathData item5b = null;
private PathData item5c = null;
private PathData item5ca = null;
private PathData item5d = null;
private PathData item5e = null;
private LinkedList<PathData> createDirectories() throws IOException {
item1 = createPathData("item1");
item1a = createPathData("item1/item1a");
item1aa = createPathData("item1/item1a/item1aa");
item1b = createPathData("item1/item1b");
item2 = createPathData("item2");
item3 = createPathData("item3");
item4 = createPathData("item4");
item5 = createPathData("item5");
item5a = createPathData("item5/item5a");
item5b = createPathData("item5/item5b");
item5c = createPathData("item5/item5c");
item5ca = createPathData("item5/item5c/item5ca");
item5d = createPathData("item5/item5d");
item5e = createPathData("item5/item5e");
LinkedList<PathData> args = new LinkedList<PathData>();
when(item1.stat.isDirectory()).thenReturn(true);
when(item1a.stat.isDirectory()).thenReturn(true);
when(item1aa.stat.isDirectory()).thenReturn(false);
when(item1b.stat.isDirectory()).thenReturn(false);
when(item2.stat.isDirectory()).thenReturn(true);
when(item3.stat.isDirectory()).thenReturn(false);
when(item4.stat.isDirectory()).thenReturn(false);
when(item5.stat.isDirectory()).thenReturn(true);
when(item5a.stat.isDirectory()).thenReturn(false);
when(item5b.stat.isDirectory()).thenReturn(false);
when(item5c.stat.isDirectory()).thenReturn(true);
when(item5ca.stat.isDirectory()).thenReturn(false);
when(item5d.stat.isDirectory()).thenReturn(false);
when(item5e.stat.isDirectory()).thenReturn(false);
when(mockFs.listStatus(eq(item1.path))).thenReturn(
new FileStatus[] { item1a.stat, item1b.stat });
when(mockFs.listStatus(eq(item1a.path))).thenReturn(
new FileStatus[] { item1aa.stat });
when(mockFs.listStatus(eq(item2.path))).thenReturn(new FileStatus[0]);
when(mockFs.listStatus(eq(item5.path))).thenReturn(
new FileStatus[] { item5a.stat, item5b.stat, item5c.stat, item5d.stat,
item5e.stat });
when(mockFs.listStatus(eq(item5c.path))).thenReturn(
new FileStatus[] { item5ca.stat });
when(mockFs.listStatusIterator(Mockito.any(Path.class)))
.thenAnswer(new Answer<RemoteIterator<FileStatus>>() {
@Override
public RemoteIterator<FileStatus> answer(InvocationOnMock invocation)
throws Throwable {
final Path p = (Path) invocation.getArguments()[0];
final FileStatus[] stats = mockFs.listStatus(p);
return new RemoteIterator<FileStatus>() {
private int i = 0;
@Override
public boolean hasNext() throws IOException {
return i < stats.length;
}
@Override
public FileStatus next() throws IOException {
if (!hasNext()) {
throw new NoSuchElementException("No more entry in " + p);
}
return stats[i++];
}
};
}
});
when(item1.stat.isSymlink()).thenReturn(false);
when(item1a.stat.isSymlink()).thenReturn(false);
when(item1aa.stat.isSymlink()).thenReturn(false);
when(item1b.stat.isSymlink()).thenReturn(false);
when(item2.stat.isSymlink()).thenReturn(false);
when(item3.stat.isSymlink()).thenReturn(false);
when(item4.stat.isSymlink()).thenReturn(true);
when(item5.stat.isSymlink()).thenReturn(false);
when(item5a.stat.isSymlink()).thenReturn(true);
when(item5b.stat.isSymlink()).thenReturn(true);
when(item5d.stat.isSymlink()).thenReturn(true);
when(item5e.stat.isSymlink()).thenReturn(true);
when(item4.stat.getSymlink()).thenReturn(item3.path);
when(item5a.stat.getSymlink()).thenReturn(item1b.path);
when(item5b.stat.getSymlink()).thenReturn(item5.path);
when(item5d.stat.getSymlink()).thenReturn(item5c.path);
when(item5e.stat.getSymlink()).thenReturn(item5ca.path);
args.add(item1);
args.add(item2);
args.add(item3);
args.add(item4);
args.add(item5);
return args;
}
private PathData createPathData(String name) throws IOException {
Path path = new Path(name);
FileStatus fstat = mock(FileStatus.class);
when(fstat.getPath()).thenReturn(path);
when(fstat.toString()).thenReturn("fileStatus:" + name);
when(mockFs.getFileStatus(eq(path))).thenReturn(fstat);
PathData item = new PathData(path.toString(), conf);
return item;
}
private LinkedList<String> getArgs(String cmd) {
return new LinkedList<String>(Arrays.asList(cmd.split(" ")));
}
}
|
TestExpression
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java
|
{
"start": 128977,
"end": 129259
}
|
interface ____ {
String strategy();
}
@Id
@GeneratedValue(strategy = "AUTO")
private Long getId() {
return 42L;
}
/**
* Mimics org.springframework.security.config.annotation.authentication.configuration.EnableGlobalAuthentication
*/
@Retention(RUNTIME)
@
|
GeneratedValue
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/input/VariableDeclTemplateExample.java
|
{
"start": 772,
"end": 982
}
|
class ____ {
@SuppressWarnings("unused")
public void example() {
int a = Integer.valueOf("3");
Integer b = Integer.valueOf("3");
final int c = Integer.valueOf("3");
}
}
|
VariableDeclTemplateExample
|
java
|
jhy__jsoup
|
src/test/java/org/jsoup/parser/StreamParserTest.java
|
{
"start": 962,
"end": 2443
}
|
class ____ {
@Test
void canStream() {
String html = "<title>Test</title></head><div id=1>D1</div><div id=2>D2<p id=3><span>P One</p><p id=4>P Two</p></div><div id=5>D3<p id=6>P three</p>";
try (StreamParser parser = new StreamParser(Parser.htmlParser()).parse(html, "")) {
StringBuilder seen;
seen = new StringBuilder();
parser.stream().forEachOrdered(el -> trackSeen(el, seen));
assertEquals("title[Test];head+;div#1[D1]+;span[P One];p#3+;p#4[P Two];div#2[D2]+;p#6[P three];div#5[D3];body;html;#root;", seen.toString());
// checks expected order, and the + indicates that element had a next sibling at time of emission
}
}
@Test
void canStreamXml() {
String html = "<outmost><DIV id=1>D1</DIV><div id=2>D2<p id=3><span>P One</p><p id=4>P Two</p></div><div id=5>D3<p id=6>P three</p>";
try (StreamParser parser = new StreamParser(Parser.xmlParser()).parse(html, "")) {
StringBuilder seen;
seen = new StringBuilder();
parser.stream().forEachOrdered(el -> trackSeen(el, seen));
assertEquals("DIV#1[D1]+;span[P One];p#3+;p#4[P Two];div#2[D2]+;p#6[P three];div#5[D3];outmost;#root;", seen.toString());
// checks expected order, and the + indicates that element had a next sibling at time of emission
}
}
@Test void canIterate() {
// same as stream, just a different
|
StreamParserTest
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/registration/RelyingPartyRegistrations.java
|
{
"start": 1290,
"end": 9461
}
|
class ____ {
private static final ResourceLoader resourceLoader = new DefaultResourceLoader();
private RelyingPartyRegistrations() {
}
/**
* Return a {@link RelyingPartyRegistration.Builder} based off of the given SAML 2.0
* Asserting Party (IDP) metadata location.
*
* Valid locations can be classpath- or file-based or they can be HTTPS endpoints.
* Some valid endpoints might include:
*
* <pre>
* metadataLocation = "classpath:asserting-party-metadata.xml";
* metadataLocation = "file:asserting-party-metadata.xml";
* metadataLocation = "https://ap.example.org/metadata";
* </pre>
*
* Note that by default the registrationId is set to be the given metadata location,
* but this will most often not be sufficient. To complete the configuration, most
* applications will also need to provide a registrationId, like so:
*
* <pre>
* RelyingPartyRegistration registration = RelyingPartyRegistrations
* .fromMetadataLocation(metadataLocation)
* .registrationId("registration-id")
* .build();
* </pre>
*
* Also note that an {@code IDPSSODescriptor} typically only contains information
* about the asserting party. Thus, you will need to remember to still populate
* anything about the relying party, like any private keys the relying party will use
* for signing AuthnRequests.
* @param metadataLocation The classpath- or file-based locations or HTTPS endpoints
* of the asserting party metadata file
* @return the {@link RelyingPartyRegistration.Builder} for further configuration
*/
public static RelyingPartyRegistration.Builder fromMetadataLocation(String metadataLocation) {
try (InputStream source = resourceLoader.getResource(metadataLocation).getInputStream()) {
return fromMetadata(source);
}
catch (IOException ex) {
if (ex.getCause() instanceof Saml2Exception) {
throw (Saml2Exception) ex.getCause();
}
throw new Saml2Exception(ex);
}
}
/**
* Return a {@link RelyingPartyRegistration.Builder} based off of the given SAML 2.0
* Asserting Party (IDP) metadata.
*
* <p>
* This method is intended for scenarios when the metadata is looked up by a separate
* mechanism. One such example is when the metadata is stored in a database.
* </p>
*
* <p>
* <strong>The callers of this method are accountable for closing the
* {@code InputStream} source.</strong>
* </p>
*
* Note that by default the registrationId is set to be the given metadata location,
* but this will most often not be sufficient. To complete the configuration, most
* applications will also need to provide a registrationId, like so:
*
* <pre>
* String xml = fromDatabase();
* try (InputStream source = new ByteArrayInputStream(xml.getBytes())) {
* RelyingPartyRegistration registration = RelyingPartyRegistrations
* .fromMetadata(source)
* .registrationId("registration-id")
* .build();
* }
* </pre>
*
* Also note that an {@code IDPSSODescriptor} typically only contains information
* about the asserting party. Thus, you will need to remember to still populate
* anything about the relying party, like any private keys the relying party will use
* for signing AuthnRequests.
* @param source the {@link InputStream} source containing the asserting party
* metadata
* @return the {@link RelyingPartyRegistration.Builder} for further configuration
* @since 5.6
*/
public static RelyingPartyRegistration.Builder fromMetadata(InputStream source) {
return collectionFromMetadata(source).iterator().next();
}
/**
* Return a {@link Collection} of {@link RelyingPartyRegistration.Builder}s based off
* of the given SAML 2.0 Asserting Party (IDP) metadata location.
*
* Valid locations can be classpath- or file-based or they can be HTTPS endpoints.
* Some valid endpoints might include:
*
* <pre>
* metadataLocation = "classpath:asserting-party-metadata.xml";
* metadataLocation = "file:asserting-party-metadata.xml";
* metadataLocation = "https://ap.example.org/metadata";
* </pre>
*
* Note that by default the registrationId is set to be the given metadata location,
* but this will most often not be sufficient. To complete the configuration, most
* applications will also need to provide a registrationId, like so:
*
* <pre>
* Iterable<RelyingPartyRegistration> registrations = RelyingPartyRegistrations
* .collectionFromMetadataLocation(location).iterator();
* RelyingPartyRegistration one = registrations.next().registrationId("one").build();
* RelyingPartyRegistration two = registrations.next().registrationId("two").build();
* return new InMemoryRelyingPartyRegistrationRepository(one, two);
* </pre>
*
* Also note that an {@code IDPSSODescriptor} typically only contains information
* about the asserting party. Thus, you will need to remember to still populate
* anything about the relying party, like any private keys the relying party will use
* for signing AuthnRequests.
* @param location The classpath- or file-based locations or HTTPS endpoints of the
* asserting party metadata file
* @return the {@link Collection} of {@link RelyingPartyRegistration.Builder}s for
* further configuration
* @since 5.7
*/
public static Collection<RelyingPartyRegistration.Builder> collectionFromMetadataLocation(String location) {
try (InputStream source = resourceLoader.getResource(location).getInputStream()) {
return collectionFromMetadata(source);
}
catch (IOException ex) {
if (ex.getCause() instanceof Saml2Exception) {
throw (Saml2Exception) ex.getCause();
}
throw new Saml2Exception(ex);
}
}
/**
* Return a {@link Collection} of {@link RelyingPartyRegistration.Builder}s based off
* of the given SAML 2.0 Asserting Party (IDP) metadata.
*
* <p>
* This method is intended for scenarios when the metadata is looked up by a separate
* mechanism. One such example is when the metadata is stored in a database.
* </p>
*
* <p>
* <strong>The callers of this method are accountable for closing the
* {@code InputStream} source.</strong>
* </p>
*
* Note that by default the registrationId is set to be the given metadata location,
* but this will most often not be sufficient. To complete the configuration, most
* applications will also need to provide a registrationId, like so:
*
* <pre>
* String xml = fromDatabase();
* try (InputStream source = new ByteArrayInputStream(xml.getBytes())) {
* Iterator<RelyingPartyRegistration> registrations = RelyingPartyRegistrations
* .collectionFromMetadata(source).iterator();
* RelyingPartyRegistration one = registrations.next().registrationId("one").build();
* RelyingPartyRegistration two = registrations.next().registrationId("two").build();
* return new InMemoryRelyingPartyRegistrationRepository(one, two);
* }
* </pre>
*
* Also note that an {@code IDPSSODescriptor} typically only contains information
* about the asserting party. Thus, you will need to remember to still populate
* anything about the relying party, like any private keys the relying party will use
* for signing AuthnRequests.
* @param source the {@link InputStream} source containing the asserting party
* metadata
* @return the {@link Collection} of {@link RelyingPartyRegistration.Builder}s for
* further configuration
* @since 5.7
*/
public static Collection<RelyingPartyRegistration.Builder> collectionFromMetadata(InputStream source) {
Collection<RelyingPartyRegistration.Builder> builders = new ArrayList<>();
for (EntityDescriptor descriptor : OpenSamlMetadataUtils.descriptors(source)) {
if (descriptor.getIDPSSODescriptor(SAMLConstants.SAML20P_NS) != null) {
OpenSamlAssertingPartyDetails assertingParty = OpenSamlAssertingPartyDetails
.withEntityDescriptor(descriptor)
.build();
builders.add(RelyingPartyRegistration.withAssertingPartyMetadata(assertingParty));
}
}
if (builders.isEmpty()) {
throw new Saml2Exception("Metadata response is missing the necessary IDPSSODescriptor element");
}
return builders;
}
}
|
RelyingPartyRegistrations
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyLoadingAndInheritanceTest.java
|
{
"start": 2519,
"end": 2758
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
public Long id;
public String name;
Contained() {
}
Contained(String name) {
this.name = name;
}
}
@Entity(name = "ContainedExtended")
static
|
Contained
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/PhysicalSlotRequestBulkCheckerImpl.java
|
{
"start": 9152,
"end": 9240
}
|
enum ____ {
PENDING,
FULFILLED,
TIMEOUT
}
}
|
TimeoutCheckResult
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/ImportAutoConfigurationImportSelectorTests.java
|
{
"start": 10988,
"end": 11094
}
|
class ____ {
}
@ImportAutoConfiguration
@UnrelatedOne
static
|
ImportMetaAutoConfigurationWithUnrelatedTwo
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringLoopCopyTest.java
|
{
"start": 1033,
"end": 1278
}
|
class ____ extends LoopCopyTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/SpringLoopCopyTest.xml");
}
}
|
SpringLoopCopyTest
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/FetchCommittedOffsetsEvent.java
|
{
"start": 1050,
"end": 1719
}
|
class ____ extends CompletableApplicationEvent<Map<TopicPartition, OffsetAndMetadata>> {
/**
* Partitions to retrieve committed offsets for.
*/
private final Set<TopicPartition> partitions;
public FetchCommittedOffsetsEvent(final Set<TopicPartition> partitions, final long deadlineMs) {
super(Type.FETCH_COMMITTED_OFFSETS, deadlineMs);
this.partitions = Collections.unmodifiableSet(partitions);
}
public Set<TopicPartition> partitions() {
return partitions;
}
@Override
public String toStringBase() {
return super.toStringBase() + ", partitions=" + partitions;
}
}
|
FetchCommittedOffsetsEvent
|
java
|
alibaba__nacos
|
client-basic/src/test/java/com/alibaba/nacos/client/auth/ram/injector/ConfigResourceInjectorTest.java
|
{
"start": 1562,
"end": 7196
}
|
class ____ {
private ConfigResourceInjector configResourceInjector;
private RamContext ramContext;
private RequestResource resource;
private String cachedSecurityCredentialsUrl;
private String cachedSecurityCredentials;
private StsCredential stsCredential;
@BeforeEach
void setUp() throws Exception {
configResourceInjector = new ConfigResourceInjector();
ramContext = new RamContext();
ramContext.setAccessKey(PropertyKeyConst.ACCESS_KEY);
ramContext.setSecretKey(PropertyKeyConst.SECRET_KEY);
resource = new RequestResource();
resource.setType(SignType.CONFIG);
resource.setNamespace("tenant");
resource.setGroup("group");
cachedSecurityCredentialsUrl = StsConfig.getInstance().getSecurityCredentialsUrl();
cachedSecurityCredentials = StsConfig.getInstance().getSecurityCredentials();
StsConfig.getInstance().setSecurityCredentialsUrl("");
StsConfig.getInstance().setSecurityCredentials("");
stsCredential = new StsCredential();
}
@AfterEach
void tearDown() throws NoSuchFieldException, IllegalAccessException {
StsConfig.getInstance().setSecurityCredentialsUrl(cachedSecurityCredentialsUrl);
StsConfig.getInstance().setSecurityCredentials(cachedSecurityCredentials);
clearForSts();
}
@Test
void testDoInjectWithFullResource() throws Exception {
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(3, actual.getAllKey().size());
assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
}
@Test
void testDoInjectWithTenant() throws Exception {
resource.setGroup("");
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(3, actual.getAllKey().size());
assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
}
@Test
void testDoInjectWithGroup() throws Exception {
resource.setNamespace("");
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(3, actual.getAllKey().size());
assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
}
@Test
void testDoInjectWithoutResource() throws Exception {
resource = new RequestResource();
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(3, actual.getAllKey().size());
assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
}
@Test
void testDoInjectForSts() throws NoSuchFieldException, IllegalAccessException {
prepareForSts();
LoginIdentityContext actual = new LoginIdentityContext();
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(4, actual.getAllKey().size());
assertEquals("test-sts-ak", actual.getParameter("Spas-AccessKey"));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
assertTrue(actual.getAllKey().contains(IdentifyConstants.SECURITY_TOKEN_HEADER));
}
@Test
void testDoInjectForV4Sign() {
LoginIdentityContext actual = new LoginIdentityContext();
ramContext.setRegionId("cn-hangzhou");
configResourceInjector.doInject(resource, ramContext, actual);
assertEquals(4, actual.getAllKey().size());
assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey"));
assertEquals(RamConstants.V4, actual.getParameter(RamConstants.SIGNATURE_VERSION));
assertTrue(actual.getAllKey().contains("Timestamp"));
assertTrue(actual.getAllKey().contains("Spas-Signature"));
}
private void prepareForSts() throws NoSuchFieldException, IllegalAccessException {
StsConfig.getInstance().setSecurityCredentialsUrl("test");
Field field = StsCredentialHolder.class.getDeclaredField("stsCredential");
field.setAccessible(true);
field.set(StsCredentialHolder.getInstance(), stsCredential);
stsCredential.setAccessKeyId("test-sts-ak");
stsCredential.setAccessKeySecret("test-sts-sk");
stsCredential.setSecurityToken("test-sts-token");
stsCredential.setExpiration(new Date(System.currentTimeMillis() + 1000000));
}
private void clearForSts() throws NoSuchFieldException, IllegalAccessException {
StsConfig.getInstance().setSecurityCredentialsUrl(null);
Field field = StsCredentialHolder.class.getDeclaredField("stsCredential");
field.setAccessible(true);
field.set(StsCredentialHolder.getInstance(), null);
}
}
|
ConfigResourceInjectorTest
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/aroundconstruct/AroundConstructTest.java
|
{
"start": 1917,
"end": 2485
}
|
class ____ {
@AroundConstruct
void mySuperCoolAroundConstruct(InvocationContext ctx) throws Exception {
INTERCEPTOR_CALLED.set(true);
assertTrue(ctx.getParameters().length == 1);
Object param = ctx.getParameters()[0];
assertTrue(param instanceof MyDependency);
assertEquals(Arc.container().instance(MyDependency.class).get().getCreated(), ((MyDependency) param).getCreated());
ctx.proceed();
}
}
@MyTransactional
@Interceptor
public static
|
SimpleInterceptor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/inheritance/Fruit.java
|
{
"start": 444,
"end": 582
}
|
class ____ {
Long id;
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
|
Fruit
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java
|
{
"start": 1165,
"end": 2984
}
|
class ____ {
/** The byte array of a UUID should be 16 */
public static final int BYTE_LENGTH = 16;
private static final int shiftWidth = 8;
/**
* @return Return clientId as byte[].
*/
public static byte[] getClientId() {
UUID uuid = UUID.randomUUID();
ByteBuffer buf = ByteBuffer.wrap(new byte[BYTE_LENGTH]);
buf.putLong(uuid.getMostSignificantBits());
buf.putLong(uuid.getLeastSignificantBits());
return buf.array();
}
/**
* @return Convert a clientId byte[] to string.
* @param clientId input clientId.
*/
public static String toString(byte[] clientId) {
// clientId can be null or an empty array
if (clientId == null || clientId.length == 0) {
return "";
}
// otherwise should be 16 bytes
Preconditions.checkArgument(clientId.length == BYTE_LENGTH);
long msb = getMsb(clientId);
long lsb = getLsb(clientId);
return (new UUID(msb, lsb)).toString();
}
public static long getMsb(byte[] clientId) {
long msb = 0;
for (int i = 0; i < BYTE_LENGTH/2; i++) {
msb = (msb << shiftWidth) | (clientId[i] & 0xff);
}
return msb;
}
public static long getLsb(byte[] clientId) {
long lsb = 0;
for (int i = BYTE_LENGTH/2; i < BYTE_LENGTH; i++) {
lsb = (lsb << shiftWidth) | (clientId[i] & 0xff);
}
return lsb;
}
/**
* @return Convert a clientId string to its byte[] representation.
* @param id input id.
*/
public static byte[] toBytes(String id) {
if (id == null || "".equals(id)) {
return new byte[0];
}
UUID uuid = UUID.fromString(id);
ByteBuffer buf = ByteBuffer.wrap(new byte[BYTE_LENGTH]);
buf.putLong(uuid.getMostSignificantBits());
buf.putLong(uuid.getLeastSignificantBits());
return buf.array();
}
}
|
ClientId
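A minimal usage sketch for the ClientId helpers shown above. It only calls the three static methods visible in the snippet; the org.apache.hadoop.ipc package is an assumption inferred from the file path.
import java.util.Arrays;
import org.apache.hadoop.ipc.ClientId; // package assumed from the snippet's path
public class ClientIdRoundTrip {
    public static void main(String[] args) {
        byte[] id = ClientId.getClientId();   // 16 bytes taken from a random UUID
        String text = ClientId.toString(id);  // canonical UUID string form
        byte[] back = ClientId.toBytes(text); // parse the string back into 16 bytes
        System.out.println(text + " round-trips cleanly: " + Arrays.equals(id, back));
    }
}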
|
java
|
elastic__elasticsearch
|
qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
|
{
"start": 1955,
"end": 2151
}
|
class ____ doing its job. Also needs to run in a separate JVM to other
* tests that extend ESTestCase for the same reason.
*/
@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class })
public
|
from
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
|
{
"start": 7402,
"end": 15081
}
|
class ____ {
// hard coded constants
int dictSize = 1000;
int minWordLen = 5;
int maxWordLen = 20;
int osInputBufferSize = 64 * 1024;
int osOutputBufferSize = 64 * 1024;
int fsInputBufferSizeNone = 0;
int fsInputBufferSizeLzo = 0;
int fsInputBufferSizeGz = 0;
int fsOutputBufferSizeNone = 1;
int fsOutputBufferSizeLzo = 1;
int fsOutputBufferSizeGz = 1;
String rootDir = GenericTestUtils.getTestDir().getAbsolutePath();
String file = "TestTFileSeek";
String compress = "gz";
int minKeyLen = 10;
int maxKeyLen = 50;
int minValLength = 100;
int maxValLength = 200;
int minBlockSize = 64 * 1024;
int fsOutputBufferSize = 1;
int fsInputBufferSize = 0;
long fileSize = 3 * 1024 * 1024;
long seekCount = 1000;
long seed;
static final int OP_CREATE = 1;
static final int OP_READ = 2;
int op = OP_CREATE | OP_READ;
boolean proceed = false;
public MyOptions(String[] args) {
seed = System.nanoTime();
try {
Options opts = buildOptions();
CommandLineParser parser = new DefaultParser();
CommandLine line = parser.parse(opts, args, true);
processOptions(line, opts);
validateOptions();
}
catch (ParseException e) {
System.out.println(e.getMessage());
System.out.println("Try \"--help\" option for details.");
setStopProceed();
}
}
public boolean proceed() {
return proceed;
}
private Options buildOptions() {
Option compress =
Option.builder("c").longOpt("compress").argName("[none|lzo|gz]")
.hasArg().desc("compression scheme").build();
Option fileSize =
Option.builder("s").longOpt("file-size").argName("size-in-MB")
.hasArg().desc("target size of the file (in MB).").build();
Option fsInputBufferSz =
Option.builder("i").longOpt("fs-input-buffer").argName("size")
.hasArg().desc("size of the file system input buffer (in bytes).").build();
Option fsOutputBufferSize =
Option.builder("o").longOpt("fs-output-buffer").argName("size")
.hasArg().desc("size of the file system output buffer (in bytes).").build();
Option keyLen =
Option.builder("k").longOpt("key-length").argName("min,max")
.hasArg().desc("the length range of the key (in bytes)").build();
Option valueLen =
Option.builder("v").longOpt("value-length").argName("min,max")
.hasArg().desc("the length range of the value (in bytes)").build();
Option blockSz =
Option.builder("b").longOpt("block").argName("size-in-KB").hasArg()
.desc("minimum block size (in KB)").build();
Option seed =
Option.builder("S").longOpt("seed").argName("long-int").hasArg()
.desc("specify the seed").build();
Option operation =
Option.builder("x").longOpt("operation").argName("r|w|rw").hasArg()
.desc("action: seek-only, create-only, seek-after-create").build();
Option rootDir =
Option.builder("r").longOpt("root-dir").argName("path").hasArg()
.desc("specify root directory where files will be created.").build();
Option file =
Option.builder("f").longOpt("file").argName("name").hasArg()
.desc("specify the file name to be created or read.").build();
Option seekCount =
Option.builder("n").longOpt("seek").argName("count").hasArg()
.desc("specify how many seek operations we perform (requires -x r or -x rw.").build();
Option help =
Option.builder("h").longOpt("help").hasArg(false)
.desc("show this screen").build();
return new Options().addOption(compress).addOption(fileSize).addOption(
fsInputBufferSz).addOption(fsOutputBufferSize).addOption(keyLen)
.addOption(blockSz).addOption(rootDir).addOption(valueLen).addOption(
operation).addOption(seekCount).addOption(file).addOption(help);
}
private void processOptions(CommandLine line, Options opts)
throws ParseException {
// --help -h and --version -V must be processed first.
if (line.hasOption('h')) {
HelpFormatter formatter = new HelpFormatter();
System.out.println("TFile and SeqFile benchmark.");
System.out.println();
formatter.printHelp(100,
"java ... TestTFileSeqFileComparison [options]",
"\nSupported options:", opts, "");
return;
}
if (line.hasOption('c')) {
compress = line.getOptionValue('c');
}
if (line.hasOption('d')) {
dictSize = Integer.parseInt(line.getOptionValue('d'));
}
if (line.hasOption('s')) {
fileSize = Long.parseLong(line.getOptionValue('s')) * 1024 * 1024;
}
if (line.hasOption('i')) {
fsInputBufferSize = Integer.parseInt(line.getOptionValue('i'));
}
if (line.hasOption('o')) {
fsOutputBufferSize = Integer.parseInt(line.getOptionValue('o'));
}
if (line.hasOption('n')) {
seekCount = Integer.parseInt(line.getOptionValue('n'));
}
if (line.hasOption('k')) {
IntegerRange ir = IntegerRange.parse(line.getOptionValue('k'));
minKeyLen = ir.from();
maxKeyLen = ir.to();
}
if (line.hasOption('v')) {
IntegerRange ir = IntegerRange.parse(line.getOptionValue('v'));
minValLength = ir.from();
maxValLength = ir.to();
}
if (line.hasOption('b')) {
minBlockSize = Integer.parseInt(line.getOptionValue('b')) * 1024;
}
if (line.hasOption('r')) {
rootDir = line.getOptionValue('r');
}
if (line.hasOption('f')) {
file = line.getOptionValue('f');
}
if (line.hasOption('S')) {
seed = Long.parseLong(line.getOptionValue('S'));
}
if (line.hasOption('x')) {
String strOp = line.getOptionValue('x');
if (strOp.equals("r")) {
op = OP_READ;
}
else if (strOp.equals("w")) {
op = OP_CREATE;
}
else if (strOp.equals("rw")) {
op = OP_CREATE | OP_READ;
}
else {
throw new ParseException("Unknown action specifier: " + strOp);
}
}
proceed = true;
}
private void validateOptions() throws ParseException {
if (!compress.equals("none") && !compress.equals("lzo")
&& !compress.equals("gz")) {
throw new ParseException("Unknown compression scheme: " + compress);
}
if (minKeyLen >= maxKeyLen) {
throw new ParseException(
"Max key length must be greater than min key length.");
}
if (minValLength >= maxValLength) {
throw new ParseException(
"Max value length must be greater than min value length.");
}
if (minWordLen >= maxWordLen) {
throw new ParseException(
"Max word length must be greater than min word length.");
}
return;
}
private void setStopProceed() {
proceed = false;
}
public boolean doCreate() {
return (op & OP_CREATE) != 0;
}
public boolean doRead() {
return (op & OP_READ) != 0;
}
}
public static void main(String[] argv) throws IOException {
TestTFileSeek testCase = new TestTFileSeek();
MyOptions options = new MyOptions(argv);
if (options.proceed == false) {
return;
}
testCase.options = options;
testCase.setUp();
testCase.testSeeks();
testCase.tearDown();
}
}
|
MyOptions
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/multipart/FormData.java
|
{
"start": 303,
"end": 1092
}
|
class ____ extends FormDataBase {
@RestForm
// don't set a part type, use the default
private String name;
@RestForm
@PartType(MediaType.TEXT_PLAIN)
private Status status;
@RestForm("htmlFile")
private FileUpload htmlPart;
@RestForm("xmlFile")
public Path xmlPart;
@RestForm
public File txtFile;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
public FileUpload getHtmlPart() {
return htmlPart;
}
public void setHtmlPart(FileUpload htmlPart) {
this.htmlPart = htmlPart;
}
}
|
FormData
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/TokenResponse.java
|
{
"start": 125,
"end": 572
}
|
class ____ {
private final String accessToken;
private final Integer expiresIn;
public TokenResponse(@JsonProperty("access_token") String accessToken, @JsonProperty("expires_in") Integer expiresIn) {
this.accessToken = accessToken;
this.expiresIn = expiresIn;
}
public String getAccessToken() {
return accessToken;
}
public Integer getExpiresIn() {
return expiresIn;
}
}
|
TokenResponse
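A hedged sketch of how the @JsonProperty-annotated constructor above binds snake_case JSON fields. The plain ObjectMapper wiring is an assumption (the original class is exercised through Quarkus REST), TokenResponse is assumed to be importable from its own package, and older Jackson versions may additionally require @JsonCreator on the constructor.
import com.fasterxml.jackson.databind.ObjectMapper;
public class TokenResponseDemo {
    public static void main(String[] args) throws Exception {
        // Jackson matches "access_token" / "expires_in" to the annotated constructor parameters.
        String json = "{\"access_token\":\"abc123\",\"expires_in\":3600}";
        TokenResponse response = new ObjectMapper().readValue(json, TokenResponse.class);
        System.out.println(response.getAccessToken() + " expires in " + response.getExpiresIn() + "s");
    }
}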
|
java
|
quarkusio__quarkus
|
extensions/hibernate-search-orm-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/deployment/HibernateSearchIntegrationRuntimeConfiguredBuildItem.java
|
{
"start": 224,
"end": 1444
}
|
class ____ extends MultiBuildItem {
private final String integrationName;
private final String persistenceUnitName;
private final HibernateOrmIntegrationRuntimeInitListener initListener;
public HibernateSearchIntegrationRuntimeConfiguredBuildItem(String integrationName, String persistenceUnitName,
HibernateOrmIntegrationRuntimeInitListener initListener) {
if (integrationName == null) {
throw new IllegalArgumentException("name cannot be null");
}
this.integrationName = integrationName;
if (persistenceUnitName == null) {
throw new IllegalArgumentException("persistenceUnitName cannot be null");
}
this.persistenceUnitName = persistenceUnitName;
this.initListener = initListener;
}
@Override
public String toString() {
return HibernateSearchIntegrationRuntimeConfiguredBuildItem.class.getSimpleName() + " [" + integrationName + "]";
}
public HibernateOrmIntegrationRuntimeInitListener getInitListener() {
return initListener;
}
public String getPersistenceUnitName() {
return persistenceUnitName;
}
}
|
HibernateSearchIntegrationRuntimeConfiguredBuildItem
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java
|
{
"start": 5596,
"end": 7552
}
|
interface ____ extends ActionListener<TrainedModelAssignment> {
default void onTimeout(TimeValue timeout) {
onFailure(
new ModelDeploymentTimeoutException(
format(
"Timed out after [%s] waiting for trained model deployment to start. "
+ "Use the trained model stats API to track the state of the deployment and try again once it has started.",
timeout
)
)
);
}
}
protected void waitForNewMasterAndRetry(
ClusterStateObserver observer,
ActionType<AcknowledgedResponse> action,
ActionRequest request,
ActionListener<AcknowledgedResponse> listener
) {
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
client.execute(action, request, listener);
}
@Override
public void onClusterServiceClose() {
logger.warn("node closed while execution action [{}] for request [{}]", action.name(), request);
listener.onFailure(new NodeClosedException(clusterService.localNode()));
}
@Override
public void onTimeout(TimeValue timeout) {
// we wait indefinitely for a new master
assert false;
}
}, ClusterStateObserver.NON_NULL_MASTER_PREDICATE);
}
private static final Class<?>[] MASTER_CHANNEL_EXCEPTIONS = new Class<?>[] {
NotMasterException.class,
ConnectTransportException.class,
FailedToCommitClusterStateException.class };
private static boolean isMasterChannelException(Exception exp) {
return org.elasticsearch.ExceptionsHelper.unwrap(exp, MASTER_CHANNEL_EXCEPTIONS) != null;
}
}
|
WaitForAssignmentListener
|
java
|
apache__kafka
|
connect/runtime/src/test/java/org/apache/kafka/connect/cli/AbstractConnectCliTest.java
|
{
"start": 7956,
"end": 8120
}
|
class ____ extends RuntimeException {
ExpectedException() {
super("Expected exception, createConfig succeeded");
}
}
}
|
ExpectedException
|
java
|
apache__spark
|
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedValuesReader.java
|
{
"start": 3186,
"end": 3633
}
|
interface ____ {
void write(WritableColumnVector c, int rowId, ByteBuffer val, int length);
static void writeArrayByteBuffer(WritableColumnVector c, int rowId, ByteBuffer val,
int length) {
c.putByteArray(rowId,
val.array(),
val.arrayOffset() + val.position(),
length);
}
static void skipWrite(WritableColumnVector c, int rowId, ByteBuffer val, int length) { }
}
}
|
ByteBufferOutputWriter
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-jaxrs/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/filter/WriterInterceptorContextImpl.java
|
{
"start": 1372,
"end": 2886
}
|
class ____ extends InterceptorContextImpl implements WriterInterceptorContext {
private final HttpResponse response;
private final Result result;
private MultivaluedMap<String, Object> headers;
public WriterInterceptorContextImpl(HttpRequest request, HttpResponse response, Result result) {
super(request);
this.response = response;
this.result = result;
}
@Override
public void proceed() throws WebApplicationException {}
@Override
public Object getEntity() {
return result.getValue();
}
@Override
public void setEntity(Object entity) {
result.setValue(entity);
}
@Override
public OutputStream getOutputStream() {
return response.outputStream();
}
@Override
public void setOutputStream(OutputStream os) {
response.setOutputStream(os);
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public MultivaluedMap<String, Object> getHeaders() {
MultivaluedMap<String, Object> headers = this.headers;
if (headers == null) {
headers = new MultivaluedMapWrapper(response.headers());
this.headers = headers;
}
return headers;
}
@Override
public MediaType getMediaType() {
return Helper.toMediaType(response.mediaType());
}
@Override
public void setMediaType(MediaType mediaType) {
response.setContentType(Helper.toString(mediaType));
}
}
|
WriterInterceptorContextImpl
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/datasource/ReactiveTransactionalHyperLogLogCommandsImpl.java
|
{
"start": 306,
"end": 1423
}
|
class ____<K, V> extends AbstractTransactionalCommands
implements ReactiveTransactionalHyperLogLogCommands<K, V> {
private final ReactiveHyperLogLogCommandsImpl<K, V> reactive;
public ReactiveTransactionalHyperLogLogCommandsImpl(ReactiveTransactionalRedisDataSource ds,
ReactiveHyperLogLogCommandsImpl<K, V> reactive, TransactionHolder tx) {
super(ds, tx);
this.reactive = reactive;
}
@Override
public Uni<Void> pfadd(K key, V... values) {
this.tx.enqueue(Response::toBoolean);
return this.reactive._pfadd(key, values).invoke(this::queuedOrDiscard).replaceWithVoid();
}
@Override
public Uni<Void> pfmerge(K destkey, K... sourcekeys) {
this.tx.enqueue(resp -> null);
return this.reactive._pfmerge(destkey, sourcekeys).invoke(this::queuedOrDiscard).replaceWithVoid();
}
@Override
public Uni<Void> pfcount(K... keys) {
this.tx.enqueue(Response::toLong);
return this.reactive._pfcount(keys).invoke(this::queuedOrDiscard).replaceWithVoid();
}
}
|
ReactiveTransactionalHyperLogLogCommandsImpl
|
java
|
apache__dubbo
|
dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/report/support/AbstractMetadataReport.java
|
{
"start": 12804,
"end": 21289
}
|
class ____ implements Runnable {
private long version;
private SaveProperties(long version) {
this.version = version;
}
@Override
public void run() {
doSaveProperties(version);
}
}
@Override
public void storeProviderMetadata(
MetadataIdentifier providerMetadataIdentifier, ServiceDefinition serviceDefinition) {
if (syncReport) {
storeProviderMetadataTask(providerMetadataIdentifier, serviceDefinition);
} else {
reportCacheExecutor.execute(() -> storeProviderMetadataTask(providerMetadataIdentifier, serviceDefinition));
}
}
private void storeProviderMetadataTask(
MetadataIdentifier providerMetadataIdentifier, ServiceDefinition serviceDefinition) {
MetadataEvent metadataEvent = MetadataEvent.toServiceSubscribeEvent(
applicationModel, providerMetadataIdentifier.getUniqueServiceName());
MetricsEventBus.post(
metadataEvent,
() -> {
boolean result = true;
try {
if (logger.isInfoEnabled()) {
logger.info("[METADATA_REGISTER] store provider metadata. Identifier : "
+ providerMetadataIdentifier + "; definition: " + serviceDefinition);
}
allMetadataReports.put(providerMetadataIdentifier, serviceDefinition);
failedReports.remove(providerMetadataIdentifier);
String data = JsonUtils.toJson(serviceDefinition);
doStoreProviderMetadata(providerMetadataIdentifier, data);
saveProperties(providerMetadataIdentifier, data, true, !syncReport);
} catch (Exception e) {
// retry again. If failed again, throw exception.
failedReports.put(providerMetadataIdentifier, serviceDefinition);
metadataReportRetry.startRetryTask();
logger.error(
PROXY_FAILED_EXPORT_SERVICE,
"",
"",
"Failed to put provider metadata " + providerMetadataIdentifier + " in "
+ serviceDefinition + ", cause: " + e.getMessage(),
e);
result = false;
}
return result;
},
aBoolean -> aBoolean);
}
@Override
public void storeConsumerMetadata(
MetadataIdentifier consumerMetadataIdentifier, Map<String, String> serviceParameterMap) {
if (syncReport) {
storeConsumerMetadataTask(consumerMetadataIdentifier, serviceParameterMap);
} else {
reportCacheExecutor.execute(
() -> storeConsumerMetadataTask(consumerMetadataIdentifier, serviceParameterMap));
}
}
protected void storeConsumerMetadataTask(
MetadataIdentifier consumerMetadataIdentifier, Map<String, String> serviceParameterMap) {
try {
if (logger.isInfoEnabled()) {
logger.info("[METADATA_REGISTER] store consumer metadata. Identifier : " + consumerMetadataIdentifier
+ "; definition: " + serviceParameterMap);
}
allMetadataReports.put(consumerMetadataIdentifier, serviceParameterMap);
failedReports.remove(consumerMetadataIdentifier);
String data = JsonUtils.toJson(serviceParameterMap);
doStoreConsumerMetadata(consumerMetadataIdentifier, data);
saveProperties(consumerMetadataIdentifier, data, true, !syncReport);
} catch (Exception e) {
// retry again. If failed again, throw exception.
failedReports.put(consumerMetadataIdentifier, serviceParameterMap);
metadataReportRetry.startRetryTask();
logger.error(
PROXY_FAILED_EXPORT_SERVICE,
"",
"",
"Failed to put consumer metadata " + consumerMetadataIdentifier + "; " + serviceParameterMap
+ ", cause: " + e.getMessage(),
e);
}
}
@Override
public void destroy() {
if (reportCacheExecutor != null) {
reportCacheExecutor.shutdown();
}
if (reportTimerScheduler != null) {
reportTimerScheduler.shutdown();
}
if (metadataReportRetry != null) {
metadataReportRetry.destroy();
metadataReportRetry = null;
}
}
@Override
public void saveServiceMetadata(ServiceMetadataIdentifier metadataIdentifier, URL url) {
if (syncReport) {
doSaveMetadata(metadataIdentifier, url);
} else {
reportCacheExecutor.execute(() -> doSaveMetadata(metadataIdentifier, url));
}
}
@Override
public void removeServiceMetadata(ServiceMetadataIdentifier metadataIdentifier) {
if (syncReport) {
doRemoveMetadata(metadataIdentifier);
} else {
reportCacheExecutor.execute(() -> doRemoveMetadata(metadataIdentifier));
}
}
@Override
public List<String> getExportedURLs(ServiceMetadataIdentifier metadataIdentifier) {
// TODO, fallback to local cache
return doGetExportedURLs(metadataIdentifier);
}
@Override
public void saveSubscribedData(SubscriberMetadataIdentifier subscriberMetadataIdentifier, Set<String> urls) {
if (syncReport) {
doSaveSubscriberData(subscriberMetadataIdentifier, JsonUtils.toJson(urls));
} else {
reportCacheExecutor.execute(
() -> doSaveSubscriberData(subscriberMetadataIdentifier, JsonUtils.toJson(urls)));
}
}
@Override
public List<String> getSubscribedURLs(SubscriberMetadataIdentifier subscriberMetadataIdentifier) {
String content = doGetSubscribedURLs(subscriberMetadataIdentifier);
return JsonUtils.toJavaList(content, String.class);
}
String getProtocol(URL url) {
String protocol = url.getSide();
protocol = protocol == null ? url.getProtocol() : protocol;
return protocol;
}
/**
* @return whether there is a need to continue
*/
public boolean retry() {
return doHandleMetadataCollection(failedReports);
}
@Override
public boolean shouldReportDefinition() {
return reportDefinition;
}
@Override
public boolean shouldReportMetadata() {
return reportMetadata;
}
private boolean doHandleMetadataCollection(Map<MetadataIdentifier, Object> metadataMap) {
if (metadataMap.isEmpty()) {
return true;
}
Iterator<Map.Entry<MetadataIdentifier, Object>> iterable =
metadataMap.entrySet().iterator();
while (iterable.hasNext()) {
Map.Entry<MetadataIdentifier, Object> item = iterable.next();
if (PROVIDER_SIDE.equals(item.getKey().getSide())) {
this.storeProviderMetadata(item.getKey(), (FullServiceDefinition) item.getValue());
} else if (CONSUMER_SIDE.equals(item.getKey().getSide())) {
this.storeConsumerMetadata(item.getKey(), (Map) item.getValue());
}
}
return false;
}
/**
* not private. just for unittest.
*/
void publishAll() {
logger.info("start to publish all metadata.");
this.doHandleMetadataCollection(allMetadataReports);
}
/**
* between 2:00 am and 6:00 am, the time is random.
*
* @return
*/
long calculateStartTime() {
Calendar calendar = Calendar.getInstance();
long nowMill = calendar.getTimeInMillis();
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
long subtract = calendar.getTimeInMillis() + ONE_DAY_IN_MILLISECONDS - nowMill;
return subtract
+ (FOUR_HOURS_IN_MILLISECONDS / 2)
+ ThreadLocalRandom.current().nextInt(FOUR_HOURS_IN_MILLISECONDS);
}
|
SaveProperties
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/annotation/SpyAnnotationTest.java
|
{
"start": 3184,
"end": 3750
}
|
class ____ {
@Spy NoValidConstructor noValidConstructor;
}
try {
MockitoAnnotations.openMocks(new FailingSpy());
fail();
} catch (MockitoException e) {
assertThat(e.getMessage())
.contains("Please ensure that the type")
.contains(NoValidConstructor.class.getSimpleName())
.contains("has a no-arg constructor");
}
}
@Test
public void should_report_when_constructor_is_explosive() throws Exception {
|
FailingSpy
|
java
|
netty__netty
|
transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringBufferRingSocketDataReadInitialStateTest.java
|
{
"start": 1056,
"end": 1871
}
|
class ____ extends SocketDataReadInitialStateTest {
@BeforeAll
public static void loadJNI() {
assumeTrue(IoUring.isAvailable());
assumeTrue(IoUring.isRegisterBufferRingSupported());
}
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return IoUringSocketTestPermutation.INSTANCE.socket();
}
@Override
protected void configure(ServerBootstrap sb, Bootstrap cb, ByteBufAllocator allocator) {
super.configure(sb, cb, allocator);
sb.childOption(IoUringChannelOption.IO_URING_BUFFER_GROUP_ID, IoUringSocketTestPermutation.BGID);
cb.option(IoUringChannelOption.IO_URING_BUFFER_GROUP_ID, IoUringSocketTestPermutation.BGID);
}
}
|
IoUringBufferRingSocketDataReadInitialStateTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/transaction/MethodLevelTransactionalSpringTests.java
|
{
"start": 2219,
"end": 2609
}
|
class ____ tests usage of {@code @Transactional} defined
* at the <strong>method level</strong>.
*
* @author Sam Brannen
* @since 2.5
* @see ClassLevelTransactionalSpringTests
*/
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DirtiesContextTestExecutionListener.class,
TransactionalTestExecutionListener.class })
@TestInstance(Lifecycle.PER_CLASS)
|
specifically
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/function/array/ArrayRemoveIndexTest.java
|
{
"start": 1610,
"end": 5376
}
|
class ____ {
@BeforeEach
public void prepareData(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.persist( new EntityWithArrays( 1L, new String[]{} ) );
em.persist( new EntityWithArrays( 2L, new String[]{ "abc", null, "def" } ) );
em.persist( new EntityWithArrays( 3L, null ) );
} );
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testRemove(SessionFactoryScope scope) {
scope.inSession( em -> {
//tag::hql-array-remove-index-example[]
List<Tuple> results = em.createQuery( "select e.id, array_remove_index(e.theArray, 1) from EntityWithArrays e order by e.id", Tuple.class )
.getResultList();
//end::hql-array-remove-index-example[]
assertEquals( 3, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertArrayEquals( new String[] {}, results.get( 0 ).get( 1, String[].class ) );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertArrayEquals( new String[] { null, "def" }, results.get( 1 ).get( 1, String[].class ) );
assertEquals( 3L, results.get( 2 ).get( 0 ) );
assertNull( results.get( 2 ).get( 1, String[].class ) );
} );
}
@Test
public void testRemoveNullIndex(SessionFactoryScope scope) {
scope.inSession( em -> {
List<Tuple> results = em.createQuery( "select e.id, array_remove_index(e.theArray, null) from EntityWithArrays e order by e.id", Tuple.class )
.getResultList();
assertEquals( 3, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertArrayEquals( new String[] {}, results.get( 0 ).get( 1, String[].class ) );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertArrayEquals( new String[] { "abc", null, "def" }, results.get( 1 ).get( 1, String[].class ) );
assertEquals( 3L, results.get( 2 ).get( 0 ) );
assertNull( results.get( 2 ).get( 1, String[].class ) );
} );
}
@Test
public void testRemoveNonExisting(SessionFactoryScope scope) {
scope.inSession( em -> {
List<Tuple> results = em.createQuery( "select e.id, array_remove_index(e.theArray, 10000) from EntityWithArrays e order by e.id", Tuple.class )
.getResultList();
assertEquals( 3, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertArrayEquals( new String[] {}, results.get( 0 ).get( 1, String[].class ) );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertArrayEquals( new String[] { "abc", null, "def" }, results.get( 1 ).get( 1, String[].class ) );
assertEquals( 3L, results.get( 2 ).get( 0 ) );
assertNull( results.get( 2 ).get( 1, String[].class ) );
} );
}
@Test
public void testNodeBuilderArray(SessionFactoryScope scope) {
scope.inSession( em -> {
final NodeBuilder cb = (NodeBuilder) em.getCriteriaBuilder();
final JpaCriteriaQuery<Tuple> cq = cb.createTupleQuery();
final JpaRoot<EntityWithArrays> root = cq.from( EntityWithArrays.class );
cq.multiselect(
root.get( "id" ),
cb.arrayRemoveIndex( root.<String[]>get( "theArray" ), cb.literal( 1 ) ),
cb.arrayRemoveIndex( root.get( "theArray" ), 1 )
);
em.createQuery( cq ).getResultList();
} );
}
@Test
public void testNodeBuilderCollection(SessionFactoryScope scope) {
scope.inSession( em -> {
final NodeBuilder cb = (NodeBuilder) em.getCriteriaBuilder();
final JpaCriteriaQuery<Tuple> cq = cb.createTupleQuery();
final JpaRoot<EntityWithArrays> root = cq.from( EntityWithArrays.class );
cq.multiselect(
root.get( "id" ),
cb.collectionRemoveIndex( root.<Collection<String>>get( "theCollection" ), cb.literal( 1 ) ),
cb.collectionRemoveIndex( root.get( "theCollection" ), 1 )
);
em.createQuery( cq ).getResultList();
} );
}
}
|
ArrayRemoveIndexTest
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/helpdesk/Account.java
|
{
"start": 526,
"end": 1704
}
|
class ____ {
private Integer id;
private Status loginStatus;
private Status systemAccessStatus;
private Status serviceStatus;
public Account() {
}
public Account(
Integer id,
Status loginStatus,
Status systemAccessStatus,
Status serviceStatus) {
this.id = id;
this.loginStatus = loginStatus;
this.systemAccessStatus = systemAccessStatus;
this.serviceStatus = serviceStatus;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@Enumerated( EnumType.ORDINAL )
public Status getLoginStatus() {
return loginStatus;
}
public void setLoginStatus(Status loginStatus) {
this.loginStatus = loginStatus;
}
@Enumerated( EnumType.STRING )
public Status getSystemAccessStatus() {
return systemAccessStatus;
}
public void setSystemAccessStatus(Status systemAccessStatus) {
this.systemAccessStatus = systemAccessStatus;
}
@Convert( converter = ServiceStatusConverter.class )
public Status getServiceStatus() {
return serviceStatus;
}
public void setServiceStatus(Status serviceStatus) {
this.serviceStatus = serviceStatus;
}
@Converter( autoApply = false )
private static
|
Account
|
java
|
apache__maven
|
impl/maven-core/src/test/java/org/apache/maven/configuration/internal/EnhancedCompositeBeanHelperTest.java
|
{
"start": 8267,
"end": 8329
}
|
class ____ testing property setting.
*/
public static
|
for
|
java
|
google__dagger
|
javatests/dagger/hilt/android/ViewModelAssistedTest.java
|
{
"start": 9367,
"end": 10080
}
|
class ____
extends Hilt_ViewModelAssistedTest_TestIncompatibleFactoriesActivity {
MyViewModel vm;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
vm =
new ViewModelProvider(
getViewModelStore(),
getDefaultViewModelProviderFactory(),
HiltViewModelExtensions.withCreationCallback(
getDefaultViewModelCreationExtras(),
(MyViewModel.AnotherFactory factory) -> factory.create("foo")))
.get(MyViewModel.class);
}
}
@AndroidEntryPoint(Fragment.class)
public static
|
TestIncompatibleFactoriesActivity
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/builder/ToStringStyle.java
|
{
"start": 15102,
"end": 15182
}
|
class ____ and no identity hash code.
*
* <p>
* This is an inner
|
name
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/validation/beanvalidation/MethodValidationAdapter.java
|
{
"start": 8083,
"end": 9875
}
|
class ____ the method,
* or for an AOP proxy without a target (with all behavior in advisors), also
* check on proxied interfaces.
*/
@Override
public Class<?>[] determineValidationGroups(Object target, Method method) {
Validated validatedAnn = AnnotationUtils.findAnnotation(method, Validated.class);
if (validatedAnn == null) {
if (AopUtils.isAopProxy(target)) {
for (Class<?> type : AopProxyUtils.proxiedUserInterfaces(target)) {
validatedAnn = AnnotationUtils.findAnnotation(type, Validated.class);
if (validatedAnn != null) {
break;
}
}
}
else {
validatedAnn = AnnotationUtils.findAnnotation(target.getClass(), Validated.class);
}
}
return (validatedAnn != null ? validatedAnn.value() : new Class<?>[0]);
}
@Override
public final MethodValidationResult validateArguments(
Object target, Method method, MethodParameter @Nullable [] parameters,
@Nullable Object[] arguments, Class<?>[] groups) {
Set<ConstraintViolation<Object>> violations =
invokeValidatorForArguments(target, method, arguments, groups);
if (violations.isEmpty()) {
return emptyValidationResult;
}
return adaptViolations(target, method, violations,
i -> (parameters != null ? parameters[i] : initMethodParameter(method, i)),
i -> arguments[i]);
}
/**
* Invoke the validator, and return the resulting violations.
*/
public final Set<ConstraintViolation<Object>> invokeValidatorForArguments(
Object target, Method method, @Nullable Object[] arguments, Class<?>[] groups) {
ExecutableValidator execVal = this.validator.get().forExecutables();
try {
return execVal.validateParameters(target, method, arguments, groups);
}
catch (IllegalArgumentException ex) {
// Probably a generic type mismatch between
|
of
|
java
|
spring-projects__spring-boot
|
module/spring-boot-tomcat/src/main/java/org/springframework/boot/tomcat/autoconfigure/TomcatServerProperties.java
|
{
"start": 10799,
"end": 15742
}
|
class ____ {
/**
* Enable access log.
*/
private boolean enabled;
/**
* Whether logging of the request will only be enabled if
* "ServletRequest.getAttribute(conditionIf)" does not yield null.
*/
private @Nullable String conditionIf;
/**
* Whether logging of the request will only be enabled if
* "ServletRequest.getAttribute(conditionUnless)" yield null.
*/
private @Nullable String conditionUnless;
/**
* Format pattern for access logs.
*/
private String pattern = "common";
/**
* Directory in which log files are created. Can be absolute or relative to the
* Tomcat base dir.
*/
private String directory = "logs";
/**
* Log file name prefix.
*/
protected String prefix = "access_log";
/**
* Log file name suffix.
*/
private String suffix = ".log";
/**
* Character set used by the log file. Default to the system default character
* set.
*/
private @Nullable String encoding;
/**
* Locale used to format timestamps in log entries and in log file name suffix.
* Default to the default locale of the Java process.
*/
private @Nullable String locale;
/**
* Whether to check for log file existence so it can be recreated if an external
* process has renamed it.
*/
private boolean checkExists;
/**
* Whether to enable access log rotation.
*/
private boolean rotate = true;
/**
* Whether to defer inclusion of the date stamp in the file name until rotate
* time.
*/
private boolean renameOnRotate;
/**
* Number of days to retain the access log files before they are removed.
*/
private int maxDays = -1;
/**
* Date format to place in the log file name.
*/
private String fileDateFormat = ".yyyy-MM-dd";
/**
* Whether to use IPv6 canonical representation format as defined by RFC 5952.
*/
private boolean ipv6Canonical;
/**
* Set request attributes for the IP address, Hostname, protocol, and port used
* for the request.
*/
private boolean requestAttributesEnabled;
/**
* Whether to buffer output such that it is flushed only periodically.
*/
private boolean buffered = true;
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public @Nullable String getConditionIf() {
return this.conditionIf;
}
public void setConditionIf(@Nullable String conditionIf) {
this.conditionIf = conditionIf;
}
public @Nullable String getConditionUnless() {
return this.conditionUnless;
}
public void setConditionUnless(@Nullable String conditionUnless) {
this.conditionUnless = conditionUnless;
}
public String getPattern() {
return this.pattern;
}
public void setPattern(String pattern) {
this.pattern = pattern;
}
public String getDirectory() {
return this.directory;
}
public void setDirectory(String directory) {
this.directory = directory;
}
public String getPrefix() {
return this.prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getSuffix() {
return this.suffix;
}
public void setSuffix(String suffix) {
this.suffix = suffix;
}
public @Nullable String getEncoding() {
return this.encoding;
}
public void setEncoding(@Nullable String encoding) {
this.encoding = encoding;
}
public @Nullable String getLocale() {
return this.locale;
}
public void setLocale(@Nullable String locale) {
this.locale = locale;
}
public boolean isCheckExists() {
return this.checkExists;
}
public void setCheckExists(boolean checkExists) {
this.checkExists = checkExists;
}
public boolean isRotate() {
return this.rotate;
}
public void setRotate(boolean rotate) {
this.rotate = rotate;
}
public boolean isRenameOnRotate() {
return this.renameOnRotate;
}
public void setRenameOnRotate(boolean renameOnRotate) {
this.renameOnRotate = renameOnRotate;
}
public int getMaxDays() {
return this.maxDays;
}
public void setMaxDays(int maxDays) {
this.maxDays = maxDays;
}
public String getFileDateFormat() {
return this.fileDateFormat;
}
public void setFileDateFormat(String fileDateFormat) {
this.fileDateFormat = fileDateFormat;
}
public boolean isIpv6Canonical() {
return this.ipv6Canonical;
}
public void setIpv6Canonical(boolean ipv6Canonical) {
this.ipv6Canonical = ipv6Canonical;
}
public boolean isRequestAttributesEnabled() {
return this.requestAttributesEnabled;
}
public void setRequestAttributesEnabled(boolean requestAttributesEnabled) {
this.requestAttributesEnabled = requestAttributesEnabled;
}
public boolean isBuffered() {
return this.buffered;
}
public void setBuffered(boolean buffered) {
this.buffered = buffered;
}
}
/**
* Tomcat thread properties.
*/
public static
|
Accesslog
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/SchemaMigrationTargetScriptCreationTest.java
|
{
"start": 1759,
"end": 3377
}
|
class ____ implements ServiceRegistryProducer {
private final File output;
public SchemaMigrationTargetScriptCreationTest(@TempDir File outputDir) {
this.output = new File( outputDir, "update_script.sql" );
}
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder builder) {
return builder.applySetting( JAKARTA_HBM2DDL_DATABASE_ACTION, "update" )
.applySetting( JAKARTA_HBM2DDL_SCRIPTS_ACTION, "update" )
.applySetting( JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET, output.getAbsolutePath() )
.build();
}
@BeforeEach
void setUp(DomainModelScope modelScope) {
// for whatever reason, on CI, these tables sometimes exist (sigh)
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE, TargetType.STDOUT ), modelScope.getDomainModel() );
}
@AfterEach
public void tearDown(DomainModelScope modelScope) {
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE, TargetType.STDOUT ), modelScope.getDomainModel() );
}
@Test
@JiraKey(value = "HHH-10684")
public void testTargetScriptIsCreated(SessionFactoryScope factoryScope) throws Exception {
factoryScope.getSessionFactory();
String fileContent = new String( Files.readAllBytes( output.toPath() ) );
Pattern fileContentPattern = Pattern.compile( "create( (column|row))? table test_entity" );
Matcher fileContentMatcher = fileContentPattern.matcher( fileContent.toLowerCase() );
MatcherAssert.assertThat( "Script file : " + fileContent.toLowerCase(), fileContentMatcher.find(), is( true ) );
}
@Entity
@Table(name = "test_entity")
public static
|
SchemaMigrationTargetScriptCreationTest
|
java
|
apache__flink
|
flink-formats/flink-protobuf/src/main/java/org/apache/flink/formats/protobuf/PbDecodingFormat.java
|
{
"start": 1458,
"end": 2260
}
|
class ____ implements DecodingFormat<DeserializationSchema<RowData>> {
private final PbFormatConfig formatConfig;
public PbDecodingFormat(PbFormatConfig formatConfig) {
this.formatConfig = formatConfig;
}
@Override
public DeserializationSchema<RowData> createRuntimeDecoder(
DynamicTableSource.Context context, DataType producedDataType) {
final RowType rowType = (RowType) producedDataType.getLogicalType();
final TypeInformation<RowData> rowDataTypeInfo =
context.createTypeInformation(producedDataType);
return new PbRowDataDeserializationSchema(rowType, rowDataTypeInfo, formatConfig);
}
@Override
public ChangelogMode getChangelogMode() {
return ChangelogMode.insertOnly();
}
}
|
PbDecodingFormat
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics-test/src/main/java/org/springframework/boot/micrometer/metrics/test/autoconfigure/AutoConfigureMetrics.java
|
{
"start": 1167,
"end": 1557
}
|
class ____ enable auto-configuration for
* metrics.
* <p>
* If this annotation is applied to a sliced test, an in-memory {@link MeterRegistry} and
* an {@link ObservationRegistry} are added to the application context.
*
* @author Moritz Halbritter
* @since 4.0.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@ImportAutoConfiguration
public @
|
to
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/jdk8/ParallelMapOptional.java
|
{
"start": 2377,
"end": 4474
}
|
class ____<T, R> implements ConditionalSubscriber<T>, Subscription {
final Subscriber<? super R> downstream;
final Function<? super T, Optional<? extends R>> mapper;
Subscription upstream;
boolean done;
ParallelMapSubscriber(Subscriber<? super R> actual, Function<? super T, Optional<? extends R>> mapper) {
this.downstream = actual;
this.mapper = mapper;
}
@Override
public void request(long n) {
upstream.request(n);
}
@Override
public void cancel() {
upstream.cancel();
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
if (!tryOnNext(t)) {
upstream.request(1);
}
}
@Override
public boolean tryOnNext(T t) {
if (done) {
return true;
}
Optional<? extends R> v;
try {
v = Objects.requireNonNull(mapper.apply(t), "The mapper returned a null Optional");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
cancel();
onError(ex);
return true;
}
if (v.isPresent()) {
downstream.onNext(v.get());
return true;
}
return false;
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
downstream.onError(t);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
downstream.onComplete();
}
}
static final
|
ParallelMapSubscriber
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/ExtendedCamelContext.java
|
{
"start": 12373,
"end": 12560
}
|
class ____ META-INF
*/
void setDefaultFactoryFinder(FactoryFinder factoryFinder);
/**
* Gets the bootstrap FactoryFinder which will be used for the loading the factory
|
from
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/examples/JsonPointerExamples.java
|
{
"start": 588,
"end": 1374
}
|
class ____ {
public void example1Pointers() {
// Build a pointer from a string
JsonPointer pointer1 = JsonPointer.from("/hello/world");
// Build a pointer manually
JsonPointer pointer2 = JsonPointer.create()
.append("hello")
.append("world");
}
public void example2Pointers(JsonPointer objectPointer, JsonObject jsonObject, JsonPointer arrayPointer, JsonArray jsonArray) {
// Query a JsonObject
Object result1 = objectPointer.queryJson(jsonObject);
// Query a JsonArray
Object result2 = arrayPointer.queryJson(jsonArray);
// Write starting from a JsonObject
objectPointer.writeJson(jsonObject, "new element");
// Write starting from a JsonArray
arrayPointer.writeJson(jsonArray, "new element");
}
}
|
JsonPointerExamples
|
java
|
apache__camel
|
components/camel-infinispan/camel-infinispan/src/test/java/org/apache/camel/component/infinispan/remote/cluster/InfinispanRemoteClusteredTestSupport.java
|
{
"start": 1275,
"end": 3126
}
|
class ____ {
private InfinispanRemoteClusteredTestSupport() {
}
public static Configuration createConfiguration(InfinispanService service) {
if (SystemUtils.IS_OS_MAC) {
Properties properties = new Properties();
properties.put("infinispan.client.hotrod.client_intelligence", "BASIC");
return new ConfigurationBuilder()
.withProperties(properties)
.addServer()
.host(service.host())
.port(service.port())
.security()
.authentication()
.username(service.username())
.password(service.password())
.serverName("infinispan")
.saslMechanism("DIGEST-MD5")
.realm("default")
.build();
} else {
return new ConfigurationBuilder()
.addServer()
.host(service.host())
.port(service.port())
.security()
.authentication()
.username(service.username())
.password(service.password())
.serverName("infinispan")
.saslMechanism("DIGEST-MD5")
.realm("default")
.build();
}
}
public static void createCache(RemoteCacheManager cacheContainer, String cacheName) {
cacheContainer.administration()
.getOrCreateCache(
cacheName,
new org.infinispan.configuration.cache.ConfigurationBuilder()
.clustering()
.cacheMode(CacheMode.DIST_SYNC).build());
}
}
|
InfinispanRemoteClusteredTestSupport
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-envers/src/main/java/io/quarkus/it/envers/Message2Provider.java
|
{
"start": 618,
"end": 2093
}
|
class ____ implements MessageBodyReader<Message2>, MessageBodyWriter<Message2> {
@Override
public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return Message2.class.isAssignableFrom(type);
}
@Override
public Message2 readFrom(Class<Message2> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException {
return new Message2("in");
}
@Override
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return Message2.class.isAssignableFrom(type);
}
@Override
public void writeTo(Message2 event, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
String data = "out";
if (annotations != null) {
for (Annotation annotation : annotations) {
if (annotation.annotationType().equals(CustomOutput.class)) {
data = ((CustomOutput) annotation).value();
break;
}
}
}
entityStream.write(String.format("{\"data\": \"%s\"}", data).getBytes(StandardCharsets.UTF_8));
}
}
|
Message2Provider
|
java
|
apache__maven
|
impl/maven-cli/src/main/java/org/apache/maven/cling/transfer/SimplexTransferListener.java
|
{
"start": 1859,
"end": 8054
}
|
class ____ extends AbstractTransferListener implements AutoCloseable {
private static final Logger LOGGER = LoggerFactory.getLogger(SimplexTransferListener.class);
private static final int QUEUE_SIZE = 1024;
private static final int BATCH_MAX_SIZE = 500;
private final TransferListener delegate;
private final int batchMaxSize;
private final boolean blockOnLastEvent;
private final ArrayBlockingQueue<Exchange> eventQueue;
private final AtomicBoolean closed;
private final Thread updater;
/**
* Constructor that makes passed in delegate run on single thread, and will block on last event.
*/
public SimplexTransferListener(TransferListener delegate) {
this(delegate, QUEUE_SIZE, BATCH_MAX_SIZE, true);
}
/**
* Constructor that may alter behaviour of this listener.
*
* @param delegate The delegate that should run on single thread.
* @param queueSize The event queue size (default {@code 1024}).
* @param batchMaxSize The maximum batch size delegate should receive (default {@code 500}).
* @param blockOnLastEvent Should this listener block on the last transfer end (completed or corrupted)? (default {@code true}).
*/
public SimplexTransferListener(
TransferListener delegate, int queueSize, int batchMaxSize, boolean blockOnLastEvent) {
this.delegate = requireNonNull(delegate);
if (queueSize < 1 || batchMaxSize < 1) {
throw new IllegalArgumentException("Queue and batch sizes must be greater than 1");
}
this.batchMaxSize = batchMaxSize;
this.blockOnLastEvent = blockOnLastEvent;
this.eventQueue = new ArrayBlockingQueue<>(queueSize);
this.closed = new AtomicBoolean(false);
this.updater = new Thread(this::feedConsumer, "simplex-transfer-listener");
updater.setDaemon(true);
updater.start();
}
@Override
public void close() {
if (closed.compareAndSet(false, true)) {
updater.interrupt();
}
}
public TransferListener getDelegate() {
return delegate;
}
private void feedConsumer() {
final ArrayList<Exchange> batch = new ArrayList<>(batchMaxSize);
try {
while (true) {
batch.clear();
if (eventQueue.drainTo(batch, BATCH_MAX_SIZE) == 0) {
batch.add(eventQueue.take());
}
demux(batch);
}
} catch (InterruptedException ignored) {
// silent
}
}
private void demux(List<Exchange> exchanges) {
for (Exchange exchange : exchanges) {
exchange.process(transferEvent -> {
TransferEvent.EventType type = transferEvent.getType();
try {
switch (type) {
case INITIATED:
delegate.transferInitiated(transferEvent);
break;
case STARTED:
delegate.transferStarted(transferEvent);
break;
case PROGRESSED:
delegate.transferProgressed(transferEvent);
break;
case CORRUPTED:
delegate.transferCorrupted(transferEvent);
break;
case SUCCEEDED:
delegate.transferSucceeded(transferEvent);
break;
case FAILED:
delegate.transferFailed(transferEvent);
break;
default:
LOGGER.warn("Invalid TransferEvent.EventType={}; ignoring it", type);
}
} catch (TransferCancelledException e) {
ongoing.put(new TransferResourceIdentifier(transferEvent.getResource()), Boolean.FALSE);
}
});
}
}
private void put(TransferEvent event, boolean last) {
try {
Exchange exchange;
if (blockOnLastEvent && last) {
exchange = new BlockingExchange(event);
} else {
exchange = new Exchange(event);
}
eventQueue.put(exchange);
exchange.waitForProcessed();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private final ConcurrentHashMap<TransferResourceIdentifier, Boolean> ongoing = new ConcurrentHashMap<>();
@Override
public void transferInitiated(TransferEvent event) {
ongoing.putIfAbsent(new TransferResourceIdentifier(event.getResource()), Boolean.TRUE);
put(event, false);
}
@Override
public void transferStarted(TransferEvent event) throws TransferCancelledException {
if (ongoing.get(new TransferResourceIdentifier(event.getResource())) == Boolean.FALSE) {
throw new TransferCancelledException();
}
put(event, false);
}
@Override
public void transferProgressed(TransferEvent event) throws TransferCancelledException {
if (ongoing.get(new TransferResourceIdentifier(event.getResource())) == Boolean.FALSE) {
throw new TransferCancelledException();
}
put(event, false);
}
@Override
public void transferCorrupted(TransferEvent event) throws TransferCancelledException {
if (ongoing.get(new TransferResourceIdentifier(event.getResource())) == Boolean.FALSE) {
throw new TransferCancelledException();
}
put(event, false);
}
@Override
public void transferSucceeded(TransferEvent event) {
ongoing.remove(new TransferResourceIdentifier(event.getResource()));
put(event, ongoing.isEmpty());
}
@Override
public void transferFailed(TransferEvent event) {
ongoing.remove(new TransferResourceIdentifier(event.getResource()));
put(event, ongoing.isEmpty());
}
private static
|
SimplexTransferListener
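A sketch of how the listener above might be wired around an existing delegate. The org.eclipse.aether imports and the session.setTransferListener call are assumptions based on the Resolver types referenced in the snippet, not taken from the original file.
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.transfer.TransferListener;
public class SimplexWiringSketch {
    // Wrap any delegate so its callbacks run on the single "simplex-transfer-listener" thread shown above.
    static void install(DefaultRepositorySystemSession session, TransferListener consoleListener) {
        SimplexTransferListener simplex = new SimplexTransferListener(consoleListener);
        session.setTransferListener(simplex);
        // simplex.close() should be called once the session is finished (the class is AutoCloseable).
    }
}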
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/tuple/internal/AnonymousTupleNonAggregatedEntityIdentifierMapping.java
|
{
"start": 1033,
"end": 3593
}
|
class ____ extends AnonymousTupleEmbeddableValuedModelPart
implements NonAggregatedIdentifierMapping {
private final NonAggregatedIdentifierMapping delegate;
public AnonymousTupleNonAggregatedEntityIdentifierMapping(
SqmExpressible<?> sqmExpressible,
SqlTypedMapping[] sqlTypedMappings,
int selectionIndex,
String selectionExpression,
Set<String> compatibleTableExpressions,
Set<? extends Attribute<?, ?>> attributes,
DomainType<?> domainType,
String componentName,
NonAggregatedIdentifierMapping delegate) {
super(
sqmExpressible,
sqlTypedMappings,
selectionIndex,
selectionExpression,
compatibleTableExpressions,
attributes,
domainType,
componentName,
delegate,
-1
);
this.delegate = delegate;
}
@Override
public Nature getNature() {
return Nature.VIRTUAL;
}
@Override
public String getAttributeName() {
return null;
}
@Override
public IdentifierValue getUnsavedStrategy() {
return delegate.getUnsavedStrategy();
}
@Override
public Object getIdentifier(Object entity) {
return delegate.getIdentifier( entity );
}
@Override
public Object getIdentifier(Object entity, MergeContext mergeContext) {
return delegate.getIdentifier( entity, mergeContext );
}
@Override
public void setIdentifier(Object entity, Object id, SharedSessionContractImplementor session) {
delegate.setIdentifier( entity, id, session );
}
@Override
public Object instantiate() {
return delegate.instantiate();
}
@Override
public boolean hasContainingClass() {
return true;
}
@Override
public EmbeddableMappingType getMappedIdEmbeddableTypeDescriptor() {
return this;
}
@Override
public EmbeddableMappingType getMappedType() {
return this;
}
@Override
public EmbeddableMappingType getPartMappingType() {
return this;
}
@Override
public VirtualIdEmbeddable getVirtualIdEmbeddable() {
return delegate.getVirtualIdEmbeddable();
}
@Override
public IdClassEmbeddable getIdClassEmbeddable() {
return delegate.getIdClassEmbeddable();
}
@Override
public IdentifierValueMapper getIdentifierValueMapper() {
return delegate.getIdentifierValueMapper();
}
@Override
public FetchStyle getStyle() {
return FetchStyle.JOIN;
}
@Override
public FetchTiming getTiming() {
return FetchTiming.IMMEDIATE;
}
@Override
public boolean areEqual(@Nullable Object one, @Nullable Object other, SharedSessionContractImplementor session) {
return delegate.areEqual( one, other, session );
}
}
|
AnonymousTupleNonAggregatedEntityIdentifierMapping
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/core/GeneratorStrategy.java
|
{
"start": 1225,
"end": 1827
}
|
class ____ on which you can call {@link ClassGenerator#generateClass}
* @return a byte array containing the bits of a valid Class
*/
byte[] generate(ClassGenerator cg) throws Exception;
/**
* The <code>GeneratorStrategy</code> in use does not currently, but may
* in the future, affect the caching of classes generated by {@link
* AbstractClassGenerator}, so this is a reminder that you should
* correctly implement <code>equals</code> and <code>hashCode</code>
* to avoid generating too many classes.
*/
@Override
boolean equals(Object o);
}
|
generator
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/node/PluginComponentBinding.java
|
{
"start": 1881,
"end": 2187
}
|
class ____ extends TransportMasterNodeAction<MyRequest, MyResponse> {
@Inject
public TransportMyAction(MyInterface myInterface) {
this.myInterface = myInterface; //implementation may vary depending on extensions defined for
}
...
}
}</pre>
*<p>
* Note - usage of the
|
TransportMyAction
|
java
|
apache__camel
|
components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/internal/AvailablePhoneNumberCountryLocalApiMethod.java
|
{
"start": 702,
"end": 1911
}
|
enum ____ implements ApiMethod {
READER(
com.twilio.rest.api.v2010.account.availablephonenumbercountry.LocalReader.class,
"reader",
arg("pathCountryCode", String.class)),
READER_1(
com.twilio.rest.api.v2010.account.availablephonenumbercountry.LocalReader.class,
"reader",
arg("pathAccountSid", String.class),
arg("pathCountryCode", String.class));
private final ApiMethod apiMethod;
AvailablePhoneNumberCountryLocalApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(Local.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
|
AvailablePhoneNumberCountryLocalApiMethod
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/valueextractor/SingletonCustomValueExtractorTest.java
|
{
"start": 1161,
"end": 1493
}
|
class ____ {
public Container<@NotBlank String> constrainedContainer;
public TestBean() {
Container<String> invalidContainer = new Container<>();
invalidContainer.value = " ";
this.constrainedContainer = invalidContainer;
}
}
@Singleton
public static
|
TestBean
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/provider/BeanOne.java
|
{
"start": 90,
"end": 130
}
|
class ____ implements BeanNumber {
}
|
BeanOne
|
java
|
resilience4j__resilience4j
|
resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/CircuitBreakerStateMachine.java
|
{
"start": 2180,
"end": 17680
}
|
class ____ implements CircuitBreaker {
private static final Logger LOG = LoggerFactory.getLogger(CircuitBreakerStateMachine.class);
private final String name;
private final AtomicReference<CircuitBreakerState> stateReference;
private final CircuitBreakerConfig circuitBreakerConfig;
private final Map<String, String> tags;
private final CircuitBreakerEventProcessor eventProcessor;
private final Clock clock;
private final SchedulerFactory schedulerFactory;
private final Function<Clock, Long> currentTimestampFunction;
private final TimeUnit timestampUnit;
private final ReentrantLock lock = new ReentrantLock();
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration.
* @param schedulerFactory A SchedulerFactory which can be mocked in tests.
*/
private CircuitBreakerStateMachine(String name, CircuitBreakerConfig circuitBreakerConfig,
SchedulerFactory schedulerFactory, Map<String, String> tags) {
this.name = name;
this.circuitBreakerConfig = Objects
.requireNonNull(circuitBreakerConfig, "Config must not be null");
this.eventProcessor = new CircuitBreakerEventProcessor();
this.clock = circuitBreakerConfig.getClock();
this.schedulerFactory = schedulerFactory;
this.tags = Objects.requireNonNull(tags, "Tags must not be null");
this.currentTimestampFunction = circuitBreakerConfig.getCurrentTimestampFunction();
this.timestampUnit = circuitBreakerConfig.getTimestampUnit();
this.stateReference = new AtomicReference<>(
getCircuitBreakerStateObjectFromState(circuitBreakerConfig.getInitialState()));
}
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration.
* @param schedulerFactory A SchedulerFactory which can be mocked in tests.
*/
public CircuitBreakerStateMachine(String name, CircuitBreakerConfig circuitBreakerConfig,
SchedulerFactory schedulerFactory) {
this(name, circuitBreakerConfig, schedulerFactory, emptyMap());
}
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration.
*/
public CircuitBreakerStateMachine(String name, CircuitBreakerConfig circuitBreakerConfig) {
this(name, circuitBreakerConfig, SchedulerFactory.getInstance(), emptyMap());
}
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration.
*/
public CircuitBreakerStateMachine(String name, CircuitBreakerConfig circuitBreakerConfig,
Map<String, String> tags) {
this(name, circuitBreakerConfig, SchedulerFactory.getInstance(), tags);
}
/**
* Creates a circuitBreaker with default config.
*
* @param name the name of the CircuitBreaker
*/
public CircuitBreakerStateMachine(String name) {
this(name, CircuitBreakerConfig.ofDefaults());
}
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration supplier.
*/
public CircuitBreakerStateMachine(String name,
Supplier<CircuitBreakerConfig> circuitBreakerConfig) {
this(name, circuitBreakerConfig.get());
}
/**
* Creates a circuitBreaker.
*
* @param name the name of the CircuitBreaker
* @param circuitBreakerConfig The CircuitBreaker configuration supplier.
*/
public CircuitBreakerStateMachine(String name,
Supplier<CircuitBreakerConfig> circuitBreakerConfig,
Map<String, String> tags) {
this(name, circuitBreakerConfig.get(), tags);
}
@Override
public long getCurrentTimestamp() {
return this.currentTimestampFunction.apply(clock);
}
@Override
public TimeUnit getTimestampUnit() {
return timestampUnit;
}
@Override
public boolean tryAcquirePermission() {
boolean callPermitted = stateReference.get().tryAcquirePermission();
if (!callPermitted) {
publishCallNotPermittedEvent();
}
return callPermitted;
}
@Override
public void releasePermission() {
stateReference.get().releasePermission();
}
@Override
public void acquirePermission() {
try {
stateReference.get().acquirePermission();
} catch (Exception e) {
publishCallNotPermittedEvent();
throw e;
}
}
@Override
public void onError(long duration, TimeUnit durationUnit, Throwable throwable) {
// Handle the case if the completable future throws a CompletionException wrapping the original exception
// where original exception is the one to retry not the CompletionException.
if (throwable instanceof CompletionException || throwable instanceof ExecutionException) {
Throwable cause = throwable.getCause();
handleThrowable(duration, durationUnit, cause);
} else {
handleThrowable(duration, durationUnit, throwable);
}
}
private void handleThrowable(long duration, TimeUnit durationUnit, Throwable throwable) {
if (circuitBreakerConfig.getIgnoreExceptionPredicate().test(throwable)) {
LOG.debug("CircuitBreaker '{}' ignored an exception:", name, throwable);
releasePermission();
publishCircuitIgnoredErrorEvent(name, duration, durationUnit, throwable);
return;
}
if (circuitBreakerConfig.getRecordExceptionPredicate().test(throwable)) {
LOG.debug("CircuitBreaker '{}' recorded an exception as failure:", name, throwable);
publishCircuitErrorEvent(name, duration, durationUnit, throwable);
stateReference.get().onError(duration, durationUnit, throwable);
} else {
LOG.debug("CircuitBreaker '{}' recorded an exception as success:", name, throwable);
publishSuccessEvent(duration, durationUnit);
stateReference.get().onSuccess(duration, durationUnit);
}
handlePossibleTransition(Either.right(throwable));
}
@Override
public void onSuccess(long duration, TimeUnit durationUnit) {
LOG.debug("CircuitBreaker '{}' succeeded:", name);
publishSuccessEvent(duration, durationUnit);
stateReference.get().onSuccess(duration, durationUnit);
}
@Override
public void onResult(long duration, TimeUnit durationUnit, @Nullable Object result) {
if (result != null && circuitBreakerConfig.getRecordResultPredicate().test(result)) {
LOG.debug("CircuitBreaker '{}' recorded a result type '{}' as failure:", name, result.getClass());
ResultRecordedAsFailureException failure = new ResultRecordedAsFailureException(name, result);
publishCircuitErrorEvent(name, duration, durationUnit, failure);
stateReference.get().onError(duration, durationUnit, failure);
} else {
onSuccess(duration, durationUnit);
if (result != null) {
handlePossibleTransition(Either.left(result));
}
}
}
private void handlePossibleTransition(Either<Object, Throwable> result) {
CircuitBreakerConfig.TransitionCheckResult transitionCheckResult = circuitBreakerConfig.getTransitionOnResult()
.apply(result);
stateReference.get().handlePossibleTransition(transitionCheckResult);
}
/**
* Get the state of this CircuitBreaker.
*
* @return the state of this CircuitBreaker
*/
@Override
public State getState() {
return this.stateReference.get().getState();
}
/**
* Get the name of this CircuitBreaker.
*
* @return the name of this CircuitBreaker
*/
@Override
public String getName() {
return this.name;
}
/**
* Get the config of this CircuitBreaker.
*
* @return the config of this CircuitBreaker
*/
@Override
public CircuitBreakerConfig getCircuitBreakerConfig() {
return circuitBreakerConfig;
}
@Override
public Metrics getMetrics() {
return this.stateReference.get().getMetrics();
}
@Override
public Map<String, String> getTags() {
return tags;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return String.format("CircuitBreaker '%s'", this.name);
}
@Override
public void reset() {
CircuitBreakerState previousState = stateReference
.getAndUpdate(currentState -> new ClosedState());
if (previousState.getState() != CLOSED) {
publishStateTransitionEvent(
StateTransition.transitionBetween(getName(), previousState.getState(), CLOSED));
}
publishResetEvent();
}
private void stateTransition(State newState,
UnaryOperator<CircuitBreakerState> newStateGenerator) {
CircuitBreakerState previousState = stateReference.getAndUpdate(currentState -> {
StateTransition.transitionBetween(getName(), currentState.getState(), newState);
currentState.preTransitionHook();
return newStateGenerator.apply(currentState);
});
publishStateTransitionEvent(
StateTransition.transitionBetween(getName(), previousState.getState(), newState));
}
@Override
public void transitionToDisabledState() {
stateTransition(DISABLED, currentState -> new DisabledState());
}
@Override
public void transitionToMetricsOnlyState() {
stateTransition(METRICS_ONLY, currentState -> new MetricsOnlyState());
}
@Override
public void transitionToForcedOpenState() {
stateTransition(FORCED_OPEN,
currentState -> new ForcedOpenState(currentState.attempts() + 1));
}
@Override
public void transitionToClosedState() {
stateTransition(CLOSED, currentState -> new ClosedState());
}
@Override
public void transitionToOpenState() {
stateTransition(OPEN,
currentState -> new OpenState(currentState.attempts() + 1, currentState.getMetrics()));
}
@Override
public void transitionToOpenStateFor(Duration waitDuration) {
stateTransition(OPEN,
currentState -> new OpenState(currentState.attempts(), waitDuration, currentState.getMetrics()));
}
@Override
public void transitionToOpenStateUntil(Instant waitUntil) {
stateTransition(OPEN,
currentState -> new OpenState(currentState.attempts(), waitUntil, currentState.getMetrics()));
}
@Override
public void transitionToHalfOpenState() {
stateTransition(HALF_OPEN, currentState -> new HalfOpenState(currentState.attempts()));
}
private boolean shouldPublishEvents(CircuitBreakerEvent event) {
return stateReference.get().shouldPublishEvents(event);
}
private void publishEventIfHasConsumer(CircuitBreakerEvent event) {
if (!eventProcessor.hasConsumers()) {
LOG.debug("No Consumers: Event {} not published", event.getEventType());
return;
}
publishEvent(event);
}
private void publishEvent(CircuitBreakerEvent event) {
if (shouldPublishEvents(event)) {
try {
eventProcessor.consumeEvent(event);
LOG.debug("Event {} published: {}", event.getEventType(), event);
} catch (Throwable t) {
LOG.warn("Failed to handle event {}", event.getEventType(), t);
}
} else {
LOG.debug("Publishing not allowed: Event {} not published", event.getEventType());
}
}
private void publishStateTransitionEvent(final StateTransition stateTransition) {
if (StateTransition.isInternalTransition(stateTransition)) {
return;
}
publishEventIfHasConsumer(new CircuitBreakerOnStateTransitionEvent(name, stateTransition));
}
private void publishResetEvent() {
publishEventIfHasConsumer(new CircuitBreakerOnResetEvent(name));
}
private void publishCallNotPermittedEvent() {
publishEventIfHasConsumer(new CircuitBreakerOnCallNotPermittedEvent(name));
}
private void publishSuccessEvent(final long duration, TimeUnit durationUnit) {
if (eventProcessor.hasConsumers()) {
publishEvent(new CircuitBreakerOnSuccessEvent(name, elapsedDuration(duration, durationUnit)));
}
}
private Duration elapsedDuration(final long duration, TimeUnit durationUnit) {
return Duration.ofNanos(durationUnit.toNanos(duration));
}
private void publishCircuitErrorEvent(final String name, final long duration, final TimeUnit durationUnit,
final Throwable throwable) {
if (eventProcessor.hasConsumers()) {
final Duration elapsedDuration = elapsedDuration(duration, durationUnit);
publishEvent(new CircuitBreakerOnErrorEvent(name, elapsedDuration, throwable));
}
}
private void publishCircuitIgnoredErrorEvent(final String name, long duration, final TimeUnit durationUnit,
final Throwable throwable) {
final Duration elapsedDuration = elapsedDuration(duration, durationUnit);
publishEventIfHasConsumer(new CircuitBreakerOnIgnoredErrorEvent(name, elapsedDuration, throwable));
}
private void publishCircuitFailureRateExceededEvent(final String name, float failureRate) {
publishEventIfHasConsumer(new CircuitBreakerOnFailureRateExceededEvent(name, failureRate));
}
private void publishCircuitSlowCallRateExceededEvent(final String name, float slowCallRate) {
publishEventIfHasConsumer(new CircuitBreakerOnSlowCallRateExceededEvent(name, slowCallRate));
}
private void publishCircuitThresholdsExceededEvent(final Result result, final CircuitBreakerMetrics metrics) {
if (Result.hasFailureRateExceededThreshold(result)) {
publishCircuitFailureRateExceededEvent(getName(), metrics.getFailureRate());
}
if (Result.hasSlowCallRateExceededThreshold(result)) {
publishCircuitSlowCallRateExceededEvent(getName(), metrics.getSlowCallRate());
}
}
@Override
public EventPublisher getEventPublisher() {
return eventProcessor;
}
private CircuitBreakerState getCircuitBreakerStateObjectFromState(State state){
return switch (state) {
case DISABLED -> new DisabledState();
case CLOSED -> new ClosedState();
case METRICS_ONLY -> new MetricsOnlyState();
case HALF_OPEN -> new HalfOpenState(1);
case FORCED_OPEN -> new ForcedOpenState(1);
case OPEN -> new OpenState(1, CircuitBreakerMetrics.forClosed(getCircuitBreakerConfig()));
default -> new ClosedState();
};
}
private
|
CircuitBreakerStateMachine
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/utils/ImplicitLinkedHashCollection.java
|
{
"start": 7858,
"end": 8313
}
|
class ____ extends AbstractSequentialList<E> {
@Override
public ListIterator<E> listIterator(int index) {
if (index < 0 || index > size) {
throw new IndexOutOfBoundsException();
}
return ImplicitLinkedHashCollection.this.listIterator(index);
}
@Override
public int size() {
return size;
}
}
private
|
ImplicitLinkedHashCollectionListView
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_2100/Issue2179.java
|
{
"start": 5892,
"end": 6458
}
|
class ____ implements ObjectDeserializer {
@SuppressWarnings("unchecked")
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
String val = StringCodec.instance.deserialze(parser, type, fieldName);
System.out.println("-----------------EnumAwareSerializer1.deserialze-----------------------------");
System.out.println(val);
return (T) ProductType1.get(JSON.parseObject(val).getInteger("code"));
}
@Override
public int getFastMatchToken() {
return JSONToken.LITERAL_STRING;
}
}
public static
|
EnumAwareSerializer1
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/integration/AbstractConfiguratorListener.java
|
{
"start": 2167,
"end": 5431
}
|
class ____ implements ConfigurationListener {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(AbstractConfiguratorListener.class);
protected List<Configurator> configurators = Collections.emptyList();
protected GovernanceRuleRepository ruleRepository;
protected Set<String> securityKey = new HashSet<>();
protected ModuleModel moduleModel;
public AbstractConfiguratorListener(ModuleModel moduleModel) {
this.moduleModel = moduleModel;
ruleRepository =
moduleModel.getExtensionLoader(GovernanceRuleRepository.class).getDefaultExtension();
initSecurityKey();
}
private void initSecurityKey() {
// FileRouterFactory key
securityKey.add(ACCESS_LOG_FIXED_PATH_KEY);
securityKey.add(ROUTER_KEY);
securityKey.add(RULE_KEY);
securityKey.add(RUNTIME_KEY);
securityKey.add(TYPE_KEY);
}
protected final void initWith(String key) {
ruleRepository.addListener(key, this);
String rawConfig = ruleRepository.getRule(key, DynamicConfiguration.DEFAULT_GROUP);
if (!StringUtils.isEmpty(rawConfig)) {
genConfiguratorsFromRawRule(rawConfig);
}
}
protected final void stopListen(String key) {
ruleRepository.removeListener(key, this);
}
@Override
public void process(ConfigChangedEvent event) {
if (logger.isInfoEnabled()) {
logger.info("Notification of overriding rule, change type is: " + event.getChangeType()
+ ", raw config content is:\n " + event.getContent());
}
if (event.getChangeType().equals(ConfigChangeType.DELETED)) {
configurators.clear();
} else {
// ADDED or MODIFIED
if (!genConfiguratorsFromRawRule(event.getContent())) {
return;
}
}
notifyOverrides();
}
private boolean genConfiguratorsFromRawRule(String rawConfig) {
List<URL> urls;
try {
// parseConfigurators will recognize app/service config automatically.
urls = ConfigParser.parseConfigurators(rawConfig);
} catch (Exception e) {
// 1-14 - Failed to parse raw dynamic config.
logger.warn(
REGISTRY_FAILED_PARSE_DYNAMIC_CONFIG,
"",
"",
"Failed to parse raw dynamic config and it will not take effect, the raw config is: " + rawConfig
+ ", cause: " + e.getMessage());
return false;
}
List<URL> safeUrls = urls.stream()
.map(url -> url.removeParameters(securityKey))
.map(url -> url.setScopeModel(moduleModel))
.collect(Collectors.toList());
configurators = Configurator.toConfigurators(safeUrls).orElse(configurators);
return true;
}
protected abstract void notifyOverrides();
public List<Configurator> getConfigurators() {
return configurators;
}
public void setConfigurators(List<Configurator> configurators) {
this.configurators = configurators;
}
}
|
AbstractConfiguratorListener
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/StompEndpointBuilderFactory.java
|
{
"start": 6220,
"end": 13259
}
|
interface ____
extends
EndpointConsumerBuilder {
default StompEndpointConsumerBuilder basic() {
return (StompEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* To use a custom HeaderFilterStrategy to filter header to and from
* Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* To use a custom HeaderFilterStrategy to filter header to and from
* Camel message.
*
* The option will be converted to a
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedStompEndpointConsumerBuilder headerFilterStrategy(String headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
}
/**
* Builder for endpoint producers for the Stomp component.
*/
public
|
AdvancedStompEndpointConsumerBuilder
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-kinesis/src/test/java/org/apache/camel/component/aws2/kinesis/integration/KinesisConsumerResumeIT.java
|
{
"start": 3340,
"end": 3679
}
|
class ____ {
private String partition;
private String body;
@Override
public String toString() {
return "KinesisData{" +
"partition='" + partition + '\'' +
", body='" + body + '\'' +
'}';
}
}
private static final
|
KinesisData
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/groovy/GroovyBeanDefinitionReaderTests.java
|
{
"start": 26529,
"end": 26588
}
|
class ____ {
void start() {
/* no-op */
}
}
|
HolyGrailQuest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/EntropyInjector.java
|
{
"start": 1231,
"end": 1373
}
|
class ____ utilities for entropy injection for FileSystems that implement {@link
* EntropyInjectingFileSystem}.
*/
@PublicEvolving
public
|
offers
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/future/FutureAssert_failsWithin_Test.java
|
{
"start": 1264,
"end": 4962
}
|
class ____ extends AbstractFutureTest {
private static final Duration ONE_SECOND = Duration.ofSeconds(1);
@Test
void should_pass_when_future_does_not_complete_within_timeout_Duration() {
// GIVEN
Future<String> future = futureCompletingAfter(ONE_SECOND, executorService);
// WHEN/THEN
assertThat(future).failsWithin(Duration.ofMillis(50));
}
@Test
void should_pass_when_future_does_not_complete_within_timeout() {
// GIVEN
Future<String> future = futureCompletingAfter(ONE_SECOND, executorService);
// WHEN/THEN
assertThat(future).failsWithin(50, MILLISECONDS);
}
@Test
void should_allow_assertion_on_future_exception_when_future_did_not_complete_within_timeout_Duration() {
// GIVEN
Future<String> future = futureCompletingAfter(ONE_SECOND, executorService);
// WHEN/THEN
assertThat(future).failsWithin(Duration.ofMillis(50))
.withThrowableOfType(TimeoutException.class)
.withMessage(null);
}
@Test
void should_allow_assertion_on_future_exception_when_future_did_not_complete_within_timeout() {
// GIVEN
Future<String> future = futureCompletingAfter(ONE_SECOND, executorService);
// WHEN/THEN
assertThat(future).failsWithin(50, MILLISECONDS)
.withThrowableThat()
.isInstanceOf(TimeoutException.class)
.withMessage(null);
}
@Test
void should_fail_if_future_completes_within_given_timeout() {
// GIVEN
Future<String> future = futureCompletingAfter(Duration.ofMillis(100), executorService);
// WHEN
var assertionError = expectAssertionError(() -> assertThat(future).failsWithin(1_000, MILLISECONDS));
// THEN
then(assertionError).hasMessageContainingAll("Completed", "to have failed within 1000L MILLISECONDS.");
}
@Test
void should_fail_if_future_completes_within_given_timeout_Duration() {
// GIVEN
Future<String> future = futureCompletingAfter(Duration.ofMillis(100), executorService);
// WHEN
var assertionError = expectAssertionError(() -> assertThat(future).failsWithin(Duration.ofMillis(1_000)));
// THEN
then(assertionError).hasMessageContainingAll("Completed", "to have failed within 1s.");
}
@Test
void should_pass_if_future_is_cancelled() {
// GIVEN
Future<String> future = new CompletableFuture<>();
future.cancel(false);
// WHEN/THEN
then(future).failsWithin(1, MILLISECONDS);
then(future).failsWithin(Duration.ofMillis(1));
}
@Test
void should_pass_if_future_execution_fails() {
// GIVEN
CompletableFuture<String> completableFuture = new CompletableFuture<>();
completableFuture.completeExceptionally(new RuntimeException("boom!"));
Future<String> future = completableFuture;
// WHEN/THEN
then(future).failsWithin(1, MILLISECONDS)
.withThrowableOfType(ExecutionException.class)
.withMessageContaining("boom!");
then(future).failsWithin(Duration.ofMillis(1))
.withThrowableThat()
.isInstanceOf(ExecutionException.class)
.withMessageContaining("boom!");
}
@Test
void should_fail_when_future_is_null() {
// GIVEN
Future<String> future = null;
// WHEN
var assertionError = expectAssertionError(() -> assertThat(future).failsWithin(1, MILLISECONDS));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
private static Future<String> futureCompletingAfter(Duration duration, ExecutorService service) {
return service.submit(() -> {
Thread.sleep(duration.toMillis());
return "ook!";
});
}
}
|
FutureAssert_failsWithin_Test
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/internals/OffsetCheckpointTest.java
|
{
"start": 1856,
"end": 7292
}
|
class ____ {
private final String topic = "topic";
@Test
public void testReadWrite() throws IOException {
final File f = TestUtils.tempFile();
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(f);
try {
final Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(new TopicPartition(topic, 0), 0L);
offsets.put(new TopicPartition(topic, 1), 1L);
offsets.put(new TopicPartition(topic, 2), 2L);
checkpoint.write(offsets);
assertEquals(offsets, checkpoint.read());
checkpoint.delete();
assertFalse(f.exists());
offsets.put(new TopicPartition(topic, 3), 3L);
checkpoint.write(offsets);
assertEquals(offsets, checkpoint.read());
} finally {
checkpoint.delete();
}
}
@Test
public void shouldNotWriteCheckpointWhenNoOffsets() throws IOException {
// we do not need to worry about file name uniqueness since this file should not be created
final File f = new File(TestUtils.tempDirectory().getAbsolutePath(), "kafka.tmp");
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(f);
checkpoint.write(Collections.emptyMap());
assertFalse(f.exists());
assertEquals(Collections.emptyMap(), checkpoint.read());
// deleting a non-exist checkpoint file should be fine
checkpoint.delete();
}
@Test
public void shouldDeleteExistingCheckpointWhenNoOffsets() throws IOException {
final File file = TestUtils.tempFile();
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(file);
final Map<TopicPartition, Long> offsets = Collections.singletonMap(new TopicPartition(topic, 0), 1L);
checkpoint.write(offsets);
assertThat(file.exists(), is(true));
assertThat(offsets, is(checkpoint.read()));
checkpoint.write(Collections.emptyMap());
assertThat(file.exists(), is(false));
assertThat(Collections.emptyMap(), is(checkpoint.read()));
}
@Test
public void shouldSkipInvalidOffsetsDuringRead() throws IOException {
final File file = TestUtils.tempFile();
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(file);
try {
final Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(new TopicPartition(topic, 0), -1L);
writeVersion0(offsets, file);
assertTrue(checkpoint.read().isEmpty());
} finally {
checkpoint.delete();
}
}
@Test
public void shouldReadAndWriteSentinelOffset() throws IOException {
final File f = TestUtils.tempFile();
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(f);
final long sentinelOffset = -4L;
try {
final Map<TopicPartition, Long> offsetsToWrite = new HashMap<>();
offsetsToWrite.put(new TopicPartition(topic, 1), sentinelOffset);
checkpoint.write(offsetsToWrite);
final Map<TopicPartition, Long> readOffsets = checkpoint.read();
assertThat(readOffsets.get(new TopicPartition(topic, 1)), equalTo(sentinelOffset));
} finally {
checkpoint.delete();
}
}
@Test
public void shouldThrowOnInvalidOffsetInWrite() throws IOException {
final File f = TestUtils.tempFile();
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(f);
try {
final Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(new TopicPartition(topic, 0), 0L);
offsets.put(new TopicPartition(topic, 1), -1L); // invalid
offsets.put(new TopicPartition(topic, 2), 2L);
assertThrows(IllegalStateException.class, () -> checkpoint.write(offsets));
} finally {
checkpoint.delete();
}
}
@Test
public void shouldThrowIOExceptionWhenWritingToNotExistedFile() {
final Map<TopicPartition, Long> offsetsToWrite = Collections.singletonMap(new TopicPartition(topic, 0), 0L);
final File notExistedFile = new File("/not_existed_dir/not_existed_file");
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(notExistedFile);
final IOException e = assertThrows(IOException.class, () -> checkpoint.write(offsetsToWrite));
assertThat(e.getMessage(), containsString("No such file or directory"));
}
/**
* Write all the offsets following the version 0 format without any verification (eg enforcing offsets >= 0)
*/
static void writeVersion0(final Map<TopicPartition, Long> offsets, final File file) throws IOException {
final FileOutputStream fileOutputStream = new FileOutputStream(file);
try (final BufferedWriter writer = new BufferedWriter(
new OutputStreamWriter(fileOutputStream, StandardCharsets.UTF_8))) {
writeIntLine(writer, 0);
writeIntLine(writer, offsets.size());
for (final Map.Entry<TopicPartition, Long> entry : offsets.entrySet()) {
final TopicPartition tp = entry.getKey();
final Long offset = entry.getValue();
writeEntry(writer, tp, offset);
}
writer.flush();
fileOutputStream.getFD().sync();
}
}
}
|
OffsetCheckpointTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/resilience/ReactiveRetryInterceptorTests.java
|
{
"start": 17054,
"end": 17305
}
|
class ____ {
AtomicInteger counter = new AtomicInteger();
public Mono<Object> retryOperation() {
return Mono.fromCallable(() -> {
counter.incrementAndGet();
throw new NumberFormatException("always fails");
});
}
}
}
|
AlwaysFailsBean
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/field/nullableinjection/C.java
|
{
"start": 694,
"end": 786
}
|
class ____ {
@Inject protected A a;
public A getA() {
return this.a;
}
}
|
C
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/InMemoryBlockBlobStore.java
|
{
"start": 5563,
"end": 6005
}
|
class ____ {
private byte[] content;
private HashMap<String, String> metadata;
private boolean isPageBlob;
@SuppressWarnings("unused") // TODO: use it
private long length;
public Entry(byte[] content, HashMap<String, String> metadata,
boolean isPageBlob, long length) {
this.content = content;
this.metadata = metadata;
this.isPageBlob = isPageBlob;
this.length = length;
}
}
}
|
Entry
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/TransformXpathTest.java
|
{
"start": 1179,
"end": 2370
}
|
class ____ extends ContextTestSupport {
@Test
public void testTransformWithXpath() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(NodeList.class);
String xml = context.getTypeConverter().convertTo(String.class,
new File("src/test/resources/org/apache/camel/processor/students.xml"));
template.sendBody("direct:start", xml);
assertMockEndpointsSatisfied();
NodeList list = mock.getReceivedExchanges().get(0).getIn().getBody(NodeList.class);
assertEquals(2, list.getLength());
assertEquals("Claus", context.getTypeConverter().convertTo(String.class, list.item(0).getTextContent().trim()));
assertEquals("Hadrian", context.getTypeConverter().convertTo(String.class, list.item(1).getTextContent().trim()));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").transform().xpath("//students/student").to("mock:result");
}
};
}
}
|
TransformXpathTest
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/transport/TripleGoAwayHandler.java
|
{
"start": 1263,
"end": 2166
}
|
class ____ extends ChannelDuplexHandler {
private static final Logger logger = LoggerFactory.getLogger(TripleGoAwayHandler.class);
public TripleGoAwayHandler() {}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof Http2GoAwayFrame) {
final ConnectionHandler connectionHandler =
(ConnectionHandler) ctx.pipeline().get(Constants.CONNECTION_HANDLER_NAME);
if (logger.isInfoEnabled()) {
logger.info("Receive go away frame of " + ctx.channel().localAddress() + " -> "
+ ctx.channel().remoteAddress() + " and will reconnect later.");
}
connectionHandler.onGoAway(ctx.channel());
ReferenceCountUtil.release(msg);
return;
}
super.channelRead(ctx, msg);
}
}
|
TripleGoAwayHandler
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/convert/support/DefaultConversionService.java
|
{
"start": 1483,
"end": 8108
}
|
class ____ extends GenericConversionService {
private static volatile @Nullable DefaultConversionService sharedInstance;
/**
* Create a new {@code DefaultConversionService} with the set of
* {@linkplain DefaultConversionService#addDefaultConverters(ConverterRegistry) default converters}.
*/
public DefaultConversionService() {
addDefaultConverters(this);
}
/**
* Return a shared default {@code ConversionService} instance,
* lazily building it once needed.
* <p><b>NOTE:</b> We highly recommend constructing individual
* {@code ConversionService} instances for customization purposes.
* This accessor is only meant as a fallback for code paths which
* need simple type coercion but cannot access a longer-lived
* {@code ConversionService} instance any other way.
* @return the shared {@code ConversionService} instance (never {@code null})
* @since 4.3.5
*/
public static ConversionService getSharedInstance() {
DefaultConversionService cs = sharedInstance;
if (cs == null) {
synchronized (DefaultConversionService.class) {
cs = sharedInstance;
if (cs == null) {
cs = new DefaultConversionService();
sharedInstance = cs;
}
}
}
return cs;
}
/**
* Add converters appropriate for most environments.
* @param converterRegistry the registry of converters to add to
* (must also be castable to ConversionService, for example, being a {@link ConfigurableConversionService})
* @throws ClassCastException if the given ConverterRegistry could not be cast to a ConversionService
*/
public static void addDefaultConverters(ConverterRegistry converterRegistry) {
addScalarConverters(converterRegistry);
addCollectionConverters(converterRegistry);
converterRegistry.addConverter(new ByteBufferConverter((ConversionService) converterRegistry));
converterRegistry.addConverter(new DateToInstantConverter());
converterRegistry.addConverter(new InstantToDateConverter());
converterRegistry.addConverter(new StringToTimeZoneConverter());
converterRegistry.addConverter(new ZoneIdToTimeZoneConverter());
converterRegistry.addConverter(new ZonedDateTimeToCalendarConverter());
converterRegistry.addConverter(new ObjectToObjectConverter());
converterRegistry.addConverter(new IdToEntityConverter((ConversionService) converterRegistry));
converterRegistry.addConverter(new FallbackObjectToStringConverter());
converterRegistry.addConverter(new ObjectToOptionalConverter((ConversionService) converterRegistry));
converterRegistry.addConverter(new OptionalToObjectConverter((ConversionService) converterRegistry));
}
/**
* Add common collection converters.
* @param converterRegistry the registry of converters to add to
* (must also be castable to ConversionService, for example, being a {@link ConfigurableConversionService})
* @throws ClassCastException if the given ConverterRegistry could not be cast to a ConversionService
* @since 4.2.3
*/
public static void addCollectionConverters(ConverterRegistry converterRegistry) {
ConversionService conversionService = (ConversionService) converterRegistry;
converterRegistry.addConverter(new ArrayToCollectionConverter(conversionService));
converterRegistry.addConverter(new CollectionToArrayConverter(conversionService));
converterRegistry.addConverter(new ArrayToArrayConverter(conversionService));
converterRegistry.addConverter(new CollectionToCollectionConverter(conversionService));
converterRegistry.addConverter(new MapToMapConverter(conversionService));
converterRegistry.addConverter(new ArrayToStringConverter(conversionService));
converterRegistry.addConverter(new StringToArrayConverter(conversionService));
converterRegistry.addConverter(new ArrayToObjectConverter(conversionService));
converterRegistry.addConverter(new ObjectToArrayConverter(conversionService));
converterRegistry.addConverter(new CollectionToStringConverter(conversionService));
converterRegistry.addConverter(new StringToCollectionConverter(conversionService));
converterRegistry.addConverter(new CollectionToObjectConverter(conversionService));
converterRegistry.addConverter(new ObjectToCollectionConverter(conversionService));
converterRegistry.addConverter(new StreamConverter(conversionService));
}
private static void addScalarConverters(ConverterRegistry converterRegistry) {
converterRegistry.addConverterFactory(new NumberToNumberConverterFactory());
converterRegistry.addConverterFactory(new StringToNumberConverterFactory());
converterRegistry.addConverter(Number.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new StringToCharacterConverter());
converterRegistry.addConverter(Character.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new NumberToCharacterConverter());
converterRegistry.addConverterFactory(new CharacterToNumberFactory());
converterRegistry.addConverter(new StringToBooleanConverter());
converterRegistry.addConverter(Boolean.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverterFactory(new StringToEnumConverterFactory());
converterRegistry.addConverter(new EnumToStringConverter((ConversionService) converterRegistry));
converterRegistry.addConverterFactory(new IntegerToEnumConverterFactory());
converterRegistry.addConverter(new EnumToIntegerConverter((ConversionService) converterRegistry));
converterRegistry.addConverter(new StringToLocaleConverter());
converterRegistry.addConverter(Locale.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new StringToCharsetConverter());
converterRegistry.addConverter(Charset.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new StringToCurrencyConverter());
converterRegistry.addConverter(Currency.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new StringToPropertiesConverter());
converterRegistry.addConverter(new PropertiesToStringConverter());
converterRegistry.addConverter(new StringToUUIDConverter());
converterRegistry.addConverter(UUID.class, String.class, new ObjectToStringConverter());
converterRegistry.addConverter(new StringToPatternConverter());
converterRegistry.addConverter(Pattern.class, String.class, new ObjectToStringConverter());
if (KotlinDetector.isKotlinPresent()) {
converterRegistry.addConverter(new StringToRegexConverter());
converterRegistry.addConverter(kotlin.text.Regex.class, String.class, new ObjectToStringConverter());
}
}
}
|
DefaultConversionService
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateProcedureTest11.java
|
{
"start": 924,
"end": 3087
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE PROCEDURE curdemo()\n" +
"BEGIN\n" +
" DECLARE done INT DEFAULT FALSE;\n" +
" DECLARE a CHAR(16);\n" +
" DECLARE b, c INT;\n" +
" DECLARE cur1 CURSOR FOR SELECT id,data FROM test.t1;\n" +
" DECLARE cur2 CURSOR FOR SELECT i FROM test.t2;\n" +
" DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;\n" +
"\n" +
" OPEN cur1;\n" +
" OPEN cur2;\n" +
"\n" +
" read_loop: LOOP\n" +
" FETCH cur1 INTO a, b;\n" +
" FETCH cur2 INTO c;\n" +
" IF done THEN\n" +
" LEAVE read_loop;\n" +
" END IF;\n" +
" IF b < c THEN\n" +
" INSERT INTO test.t3 VALUES (a,b);\n" +
" ELSE\n" +
" INSERT INTO test.t3 VALUES (a,c);\n" +
" END IF;\n" +
" END LOOP;\n" +
"\n" +
" CLOSE cur1;\n" +
" CLOSE cur2;\n" +
"END;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLStatement stmt = statementList.get(0);
// print(statementList);
// assertEquals(1, statementList.size());
System.out.println(SQLUtils.toMySqlString(stmt));
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(3, visitor.getTables().size());
assertEquals(3, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
}
|
MySqlCreateProcedureTest11
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java
|
{
"start": 23630,
"end": 24754
}
|
class ____ extends RstatementContext {
public TerminalNode WHILE() {
return getToken(PainlessParser.WHILE, 0);
}
public TerminalNode LP() {
return getToken(PainlessParser.LP, 0);
}
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class, 0);
}
public TerminalNode RP() {
return getToken(PainlessParser.RP, 0);
}
public TrailerContext trailer() {
return getRuleContext(TrailerContext.class, 0);
}
public EmptyContext empty() {
return getRuleContext(EmptyContext.class, 0);
}
public WhileContext(RstatementContext ctx) {
copyFrom(ctx);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor<? extends T>) visitor).visitWhile(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
WhileContext
|