_id stringlengths 2 7 | title stringlengths 3 140 | partition stringclasses 3
values | text stringlengths 73 34.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q172000 | Configuration.validateUnblockTimeout | test | public static void validateUnblockTimeout(
final long publicationUnblockTimeoutNs, final long clientLivenessTimeoutNs, final long timerIntervalNs)
{
// Enforce the strict ordering publicationUnblockTimeoutNs > clientLivenessTimeoutNs > timerIntervalNs,
// failing fast with a ConfigurationException that names the offending pair of values.
if (publicationUnblockTimeoutNs <= clientLivenessTimeoutNs)
{
throw new ConfigurationException(
"publicationUnblockTimeoutNs=" + publicationUnblockTimeoutNs +
" <= clientLivenessTimeoutNs=" + clientLivenessTimeoutNs);
}
if (clientLivenessTimeoutNs <= timerIntervalNs)
{
throw new ConfigurationException(
"clientLivenessTimeoutNs=" + clientLivenessTimeoutNs +
" <= timerIntervalNs=" + timerIntervalNs);
}
} | java | {
"resource": ""
} |
q172001 | ErrorResponseFlyweight.errorCode | test | public ErrorResponseFlyweight errorCode(final ErrorCode code)
{
// Write the error code's integer value at the fixed field offset; fluent return for chaining.
buffer.putInt(offset + ERROR_CODE_OFFSET, code.value());
return this;
} | java | {
"resource": ""
} |
q172002 | ClusterMember.reset | test | public void reset()
{
// Clear all per-election and per-term state back to defaults so the
// member can take part in a fresh election round.
isBallotSent = false;
isLeader = false;
hasRequestedJoin = false;
hasSentTerminationAck = false;
vote = null;
candidateTermId = Aeron.NULL_VALUE;
leadershipTermId = Aeron.NULL_VALUE;
logPosition = NULL_POSITION;
} | java | {
"resource": ""
} |
q172003 | ClusterMember.encodeAsString | test | public static String encodeAsString(final ClusterMember[] clusterMembers)
{
// Encode members as "id,endpointsDetail" entries separated by '|',
// with no trailing separator after the last entry.
final StringBuilder builder = new StringBuilder();
for (int i = 0, length = clusterMembers.length; i < length; i++)
{
final ClusterMember member = clusterMembers[i];
builder
.append(member.id())
.append(',')
.append(member.endpointsDetail());
// Only append the separator between entries, not after the final one.
if ((length - 1) != i)
{
builder.append('|');
}
}
return builder.toString();
} | java | {
"resource": ""
} |
q172004 | ClusterMember.addMemberStatusPublications | test | public static void addMemberStatusPublications(
final ClusterMember[] members,
final ClusterMember exclude,
final ChannelUri channelUri,
final int streamId,
final Aeron aeron)
{
// Add an exclusive publication to every member except 'exclude' (typically
// the local member). NOTE: the passed channelUri is mutated in place — its
// endpoint param is overwritten for each member in turn.
for (final ClusterMember member : members)
{
if (member != exclude)
{
channelUri.put(ENDPOINT_PARAM_NAME, member.memberFacingEndpoint());
member.publication = aeron.addExclusivePublication(channelUri.toString(), streamId);
}
}
} | java | {
"resource": ""
} |
q172005 | ClusterMember.closeMemberPublications | test | public static void closeMemberPublications(final ClusterMember[] clusterMembers)
{
// Close each member's publication; CloseHelper.close is used so a null
// publication (member that never had one assigned) is tolerated.
for (final ClusterMember member : clusterMembers)
{
CloseHelper.close(member.publication);
}
} | java | {
"resource": ""
} |
q172006 | ClusterMember.hasActiveQuorum | test | public static boolean hasActiveQuorum(
final ClusterMember[] clusterMembers, final long nowMs, final long timeoutMs)
{
// A member counts towards the quorum if it is the leader or its last
// append-position update arrived within timeoutMs of nowMs. Returns true
// as soon as enough active members have been seen.
int threshold = quorumThreshold(clusterMembers.length);
for (final ClusterMember member : clusterMembers)
{
if (member.isLeader() || nowMs <= (member.timeOfLastAppendPositionMs() + timeoutMs))
{
if (--threshold <= 0)
{
return true;
}
}
}
return false;
} | java | {
"resource": ""
} |
q172007 | ClusterMember.quorumPosition | test | public static long quorumPosition(final ClusterMember[] members, final long[] rankedPositions)
{
// Maintain rankedPositions as the top-N member log positions in descending
// order via a single-pass insertion; the smallest of the top N (the last
// slot) is the position a quorum of members has reached.
final int length = rankedPositions.length;
for (int i = 0; i < length; i++)
{
rankedPositions[i] = 0;
}
for (final ClusterMember member : members)
{
long newPosition = member.logPosition;
for (int i = 0; i < length; i++)
{
final long rankedPosition = rankedPositions[i];
// Insert newPosition at slot i and carry the displaced value down.
if (newPosition > rankedPosition)
{
rankedPositions[i] = newPosition;
newPosition = rankedPosition;
}
}
}
return rankedPositions[length - 1];
} | java | {
"resource": ""
} |
q172008 | ClusterMember.resetLogPositions | test | public static void resetLogPositions(final ClusterMember[] clusterMembers, final long logPosition)
{
// Set every member's log position to the same value.
for (final ClusterMember member : clusterMembers)
{
member.logPosition(logPosition);
}
} | java | {
"resource": ""
} |
q172009 | ClusterMember.haveVotersReachedPosition | test | public static boolean haveVotersReachedPosition(
final ClusterMember[] clusterMembers, final long position, final long leadershipTermId)
{
// Only members that have cast a vote (vote != null) are checked; each such
// voter must have reached the position in the given leadership term.
for (final ClusterMember member : clusterMembers)
{
if (member.vote != null && (member.logPosition < position || member.leadershipTermId != leadershipTermId))
{
return false;
}
}
return true;
} | java | {
"resource": ""
} |
q172010 | ClusterMember.hasWonVoteOnFullCount | test | public static boolean hasWonVoteOnFullCount(final ClusterMember[] members, final long candidateTermId)
{
// Requires every member to have voted in this candidate term; a single
// missing or stale vote fails the full count. Affirmative votes must then
// meet the quorum threshold.
int votes = 0;
for (final ClusterMember member : members)
{
if (null == member.vote || member.candidateTermId != candidateTermId)
{
return false;
}
votes += member.vote ? 1 : 0;
}
return votes >= ClusterMember.quorumThreshold(members.length);
} | java | {
"resource": ""
} |
q172011 | ClusterMember.hasMajorityVote | test | public static boolean hasMajorityVote(final ClusterMember[] clusterMembers, final long candidateTermId)
{
// Count affirmative votes cast for this candidate term and compare the
// tally against the quorum threshold for the cluster size.
int affirmativeVotes = 0;
for (final ClusterMember clusterMember : clusterMembers)
{
final boolean votedYes = Boolean.TRUE.equals(clusterMember.vote);
if (votedYes && candidateTermId == clusterMember.candidateTermId)
{
affirmativeVotes++;
}
}
return affirmativeVotes >= ClusterMember.quorumThreshold(clusterMembers.length);
} | java | {
"resource": ""
} |
q172012 | ClusterMember.determineMember | test | public static ClusterMember determineMember(
final ClusterMember[] clusterMembers, final int memberId, final String memberEndpoints)
{
// Resolve the local member either from the configured cluster members by id,
// or — when no members are configured — by parsing the supplied endpoints.
ClusterMember member = NULL_VALUE != memberId ? ClusterMember.findMember(clusterMembers, memberId) : null;
if ((null == clusterMembers || 0 == clusterMembers.length) && null == member)
{
// No static membership configured: build a member from the endpoints string.
member = ClusterMember.parseEndpoints(NULL_VALUE, memberEndpoints);
}
else
{
if (null == member)
{
throw new ClusterException("memberId=" + memberId + " not found in clusterMembers");
}
// When endpoints are also supplied they must agree with the configured member.
if (!"".equals(memberEndpoints))
{
ClusterMember.validateMemberEndpoints(member, memberEndpoints);
}
}
return member;
} | java | {
"resource": ""
} |
q172013 | ClusterMember.validateMemberEndpoints | test | public static void validateMemberEndpoints(final ClusterMember member, final String memberEndpoints)
{
// Parse the endpoints string into a throwaway member and compare all
// endpoints against the configured member, throwing on any mismatch.
final ClusterMember endpointMember = ClusterMember.parseEndpoints(Aeron.NULL_VALUE, memberEndpoints);
if (!areSameEndpoints(member, endpointMember))
{
throw new ClusterException(
"clusterMembers and memberEndpoints differ: " + member.endpointsDetail() + " != " + memberEndpoints);
}
} | java | {
"resource": ""
} |
q172014 | ClusterMember.areSameEndpoints | test | public static boolean areSameEndpoints(final ClusterMember lhs, final ClusterMember rhs)
{
// Two members are the same only when all five endpoints match exactly;
// comparisons short-circuit in the same order as the original chain.
if (!lhs.clientFacingEndpoint().equals(rhs.clientFacingEndpoint()))
{
return false;
}
if (!lhs.memberFacingEndpoint().equals(rhs.memberFacingEndpoint()))
{
return false;
}
if (!lhs.logEndpoint().equals(rhs.logEndpoint()))
{
return false;
}
if (!lhs.transferEndpoint().equals(rhs.transferEndpoint()))
{
return false;
}
return lhs.archiveEndpoint().equals(rhs.archiveEndpoint());
} | java | {
"resource": ""
} |
q172015 | ClusterMember.isUnanimousCandidate | test | public static boolean isUnanimousCandidate(final ClusterMember[] clusterMembers, final ClusterMember candidate)
{
// The candidate is unanimous only if every member has reported a log
// position and none has a more up-to-date log than the candidate.
for (final ClusterMember member : clusterMembers)
{
if (NULL_POSITION == member.logPosition || compareLog(candidate, member) < 0)
{
return false;
}
}
return true;
} | java | {
"resource": ""
} |
q172016 | ClusterMember.isQuorumCandidate | test | public static boolean isQuorumCandidate(final ClusterMember[] clusterMembers, final ClusterMember candidate)
{
// A member is a possible vote when it has a known log position and its log
// is not ahead of the candidate's; the candidate qualifies if possible
// votes meet the quorum threshold.
int possibleVotes = 0;
for (final ClusterMember member : clusterMembers)
{
if (NULL_POSITION == member.logPosition || compareLog(candidate, member) < 0)
{
continue;
}
++possibleVotes;
}
return possibleVotes >= ClusterMember.quorumThreshold(clusterMembers.length);
} | java | {
"resource": ""
} |
q172017 | ClusterMember.isNotDuplicateEndpoints | test | public static boolean isNotDuplicateEndpoints(final ClusterMember[] members, final String memberEndpoints)
{
// True only when no existing member already uses the given endpoints detail.
for (int i = 0, length = members.length; i < length; i++)
{
if (members[i].endpointsDetail().equals(memberEndpoints))
{
return false;
}
}
return true;
} | java | {
"resource": ""
} |
q172018 | ClusterMember.findMemberIndex | test | public static int findMemberIndex(final ClusterMember[] clusterMembers, final int memberId)
{
// Scan the whole array without early exit; if duplicate ids existed the
// LAST matching index would win. Returns ArrayUtil.UNKNOWN_INDEX when absent.
final int length = clusterMembers.length;
int index = ArrayUtil.UNKNOWN_INDEX;
for (int i = 0; i < length; i++)
{
if (clusterMembers[i].id() == memberId)
{
index = i;
}
}
return index;
} | java | {
"resource": ""
} |
q172019 | ClusterMember.removeMember | test | public static ClusterMember[] removeMember(final ClusterMember[] oldMembers, final int memberId)
{
// Delegates to ArrayUtil.remove with the located index; behaviour for an
// unknown memberId (UNKNOWN_INDEX) is whatever ArrayUtil.remove does with it.
return ArrayUtil.remove(oldMembers, findMemberIndex(oldMembers, memberId));
} | java | {
"resource": ""
} |
q172020 | ClusterMember.highMemberId | test | public static int highMemberId(final ClusterMember[] clusterMembers)
{
// Scan for the maximum member id; Aeron.NULL_VALUE when the array is empty.
int max = Aeron.NULL_VALUE;
for (int i = 0, length = clusterMembers.length; i < length; i++)
{
final int id = clusterMembers[i].id();
if (id > max)
{
max = id;
}
}
return max;
} | java | {
"resource": ""
} |
q172021 | CommonContext.mapExistingCncFile | test | public MappedByteBuffer mapExistingCncFile(final Consumer<String> logger)
{
// Map the CnC (command-and-control) file in the Aeron directory if it
// exists and is non-empty; returns null otherwise. Logger is optional.
final File cncFile = new File(aeronDirectory, CncFileDescriptor.CNC_FILE);
if (cncFile.exists() && cncFile.length() > 0)
{
if (null != logger)
{
logger.accept("INFO: Aeron CnC file exists: " + cncFile);
}
return IoUtil.mapExistingFile(cncFile, CncFileDescriptor.CNC_FILE);
}
return null;
} | java | {
"resource": ""
} |
q172022 | CommonContext.isDriverActive | test | public static boolean isDriverActive(
final File directory, final long driverTimeoutMs, final Consumer<String> logger)
{
// Map the CnC file in the given directory and check the driver heartbeat,
// always unmapping the buffer; false when no usable CnC file exists.
// NOTE(review): unlike mapExistingCncFile, logger is dereferenced without a
// null check here — callers appear expected to pass a non-null logger.
final File cncFile = new File(directory, CncFileDescriptor.CNC_FILE);
if (cncFile.exists() && cncFile.length() > 0)
{
logger.accept("INFO: Aeron CnC file exists: " + cncFile);
final MappedByteBuffer cncByteBuffer = IoUtil.mapExistingFile(cncFile, "CnC file");
try
{
return isDriverActive(driverTimeoutMs, logger, cncByteBuffer);
}
finally
{
// Unmap even when the active check throws (e.g. DriverTimeoutException).
IoUtil.unmap(cncByteBuffer);
}
}
return false;
} | java | {
"resource": ""
} |
q172023 | CommonContext.isDriverActive | test | public boolean isDriverActive(final long driverTimeoutMs, final Consumer<String> logger)
{
// Instance variant: maps this context's CnC file (may be null when absent —
// the buffer-taking overload handles null) and always unmaps afterwards.
final MappedByteBuffer cncByteBuffer = mapExistingCncFile(logger);
try
{
return isDriverActive(driverTimeoutMs, logger, cncByteBuffer);
}
finally
{
// presumably IoUtil.unmap tolerates null — TODO confirm against Agrona docs.
IoUtil.unmap(cncByteBuffer);
}
} | java | {
"resource": ""
} |
q172024 | CommonContext.isDriverActive | test | public static boolean isDriverActive(
final long driverTimeoutMs, final Consumer<String> logger, final ByteBuffer cncByteBuffer)
{
// Decide whether a media driver is alive by reading its CnC metadata:
// wait (up to driverTimeoutMs) for the version field to be published,
// validate it, then compare the driver's consumer heartbeat age.
if (null == cncByteBuffer)
{
return false;
}
final UnsafeBuffer cncMetaDataBuffer = CncFileDescriptor.createMetaDataBuffer(cncByteBuffer);
final long startTimeMs = System.currentTimeMillis();
int cncVersion;
// The CnC file may exist before the driver finishes initialising it; spin
// on the volatile version field until non-zero or the timeout elapses.
while (0 == (cncVersion = cncMetaDataBuffer.getIntVolatile(CncFileDescriptor.cncVersionOffset(0))))
{
if (System.currentTimeMillis() > (startTimeMs + driverTimeoutMs))
{
throw new DriverTimeoutException("CnC file is created but not initialised.");
}
sleep(1);
}
if (CNC_VERSION != cncVersion)
{
throw new AeronException(
"Aeron CnC version does not match: required=" + CNC_VERSION + " version=" + cncVersion);
}
final ManyToOneRingBuffer toDriverBuffer = new ManyToOneRingBuffer(
CncFileDescriptor.createToDriverBuffer(cncByteBuffer, cncMetaDataBuffer));
// The driver refreshes this heartbeat; if its age is within the timeout
// the driver is considered active.
final long timestamp = toDriverBuffer.consumerHeartbeatTime();
final long now = System.currentTimeMillis();
final long timestampAge = now - timestamp;
logger.accept("INFO: Aeron toDriver consumer heartbeat is (ms): " + timestampAge);
return timestampAge <= driverTimeoutMs;
} | java | {
"resource": ""
} |
q172025 | CommonContext.requestDriverTermination | test | public static boolean requestDriverTermination(
final File directory,
final DirectBuffer tokenBuffer,
final int tokenOffset,
final int tokenLength)
{
// Ask a running media driver to terminate by writing a termination request
// (with an optional auth token) to its to-driver ring buffer via the CnC
// file. Returns false when no usable CnC file exists.
final File cncFile = new File(directory, CncFileDescriptor.CNC_FILE);
if (cncFile.exists() && cncFile.length() > 0)
{
final MappedByteBuffer cncByteBuffer = IoUtil.mapExistingFile(cncFile, "CnC file");
try
{
final UnsafeBuffer cncMetaDataBuffer = CncFileDescriptor.createMetaDataBuffer(cncByteBuffer);
final int cncVersion = cncMetaDataBuffer.getIntVolatile(cncVersionOffset(0));
if (CncFileDescriptor.CNC_VERSION != cncVersion)
{
throw new AeronException(
"Aeron CnC version does not match: required=" + CNC_VERSION + " version=" + cncVersion);
}
final ManyToOneRingBuffer toDriverBuffer = new ManyToOneRingBuffer(
CncFileDescriptor.createToDriverBuffer(cncByteBuffer, cncMetaDataBuffer));
// A fresh correlation id doubles as the client id for the proxy request.
final long clientId = toDriverBuffer.nextCorrelationId();
final DriverProxy driverProxy = new DriverProxy(toDriverBuffer, clientId);
return driverProxy.terminateDriver(tokenBuffer, tokenOffset, tokenLength);
}
finally
{
// Always release the mapping, even when version validation throws.
IoUtil.unmap(cncByteBuffer);
}
}
return false;
} | java | {
"resource": ""
} |
q172026 | FrameDescriptor.frameLengthVolatile | test | public static int frameLengthVolatile(final UnsafeBuffer buffer, final int termOffset)
{
// Volatile read of the frame length at the start of the frame. Frames are
// encoded little-endian on the wire, so byte-swap on big-endian hosts.
int frameLength = buffer.getIntVolatile(termOffset);
if (ByteOrder.nativeOrder() != LITTLE_ENDIAN)
{
frameLength = Integer.reverseBytes(frameLength);
}
return frameLength;
} | java | {
"resource": ""
} |
q172027 | FrameDescriptor.frameLengthOrdered | test | public static void frameLengthOrdered(final UnsafeBuffer buffer, final int termOffset, final int frameLength)
{
// Ordered (store-release) write of the frame length so readers observe the
// frame body before the length. Value is stored little-endian regardless
// of the host byte order, hence the swap on big-endian machines.
int length = frameLength;
if (ByteOrder.nativeOrder() != LITTLE_ENDIAN)
{
length = Integer.reverseBytes(frameLength);
}
buffer.putIntOrdered(termOffset, length);
} | java | {
"resource": ""
} |
q172028 | FrameDescriptor.frameType | test | public static void frameType(final UnsafeBuffer buffer, final int termOffset, final int type)
{
// Write the frame type as a little-endian short at the type field offset;
// the int argument is narrowed to 16 bits.
buffer.putShort(typeOffset(termOffset), (short)type, LITTLE_ENDIAN);
} | java | {
"resource": ""
} |
q172029 | FrameDescriptor.frameFlags | test | public static void frameFlags(final UnsafeBuffer buffer, final int termOffset, final byte flags)
{
// Write the single flags byte at the frame's flags field offset.
buffer.putByte(flagsOffset(termOffset), flags);
} | java | {
"resource": ""
} |
q172030 | FrameDescriptor.frameTermOffset | test | public static void frameTermOffset(final UnsafeBuffer buffer, final int termOffset)
{
// Record the frame's own term offset in its header, little-endian.
buffer.putInt(termOffsetOffset(termOffset), termOffset, LITTLE_ENDIAN);
} | java | {
"resource": ""
} |
q172031 | FrameDescriptor.frameTermId | test | public static void frameTermId(final UnsafeBuffer buffer, final int termOffset, final int termId)
{
// Write the term id into the frame header, little-endian.
buffer.putInt(termIdOffset(termOffset), termId, LITTLE_ENDIAN);
} | java | {
"resource": ""
} |
q172032 | RecordingPos.findCounterIdByRecording | test | public static int findCounterIdByRecording(final CountersReader countersReader, final long recordingId)
{
// Linear scan of the counters metadata for an allocated recording-position
// counter whose key holds the given recording id.
final DirectBuffer buffer = countersReader.metaDataBuffer();
for (int i = 0, size = countersReader.maxCounterId(); i < size; i++)
{
if (countersReader.getCounterState(i) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(i);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECORDING_POSITION_TYPE_ID &&
buffer.getLong(recordOffset + KEY_OFFSET + RECORDING_ID_OFFSET) == recordingId)
{
return i;
}
}
}
// No matching counter found.
return NULL_COUNTER_ID;
} | java | {
"resource": ""
} |
q172033 | RecordingPos.findCounterIdBySession | test | public static int findCounterIdBySession(final CountersReader countersReader, final int sessionId)
{
// Same scan as findCounterIdByRecording, but matching on the session id
// stored in the counter key instead of the recording id.
final DirectBuffer buffer = countersReader.metaDataBuffer();
for (int i = 0, size = countersReader.maxCounterId(); i < size; i++)
{
if (countersReader.getCounterState(i) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(i);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECORDING_POSITION_TYPE_ID &&
buffer.getInt(recordOffset + KEY_OFFSET + SESSION_ID_OFFSET) == sessionId)
{
return i;
}
}
}
return NULL_COUNTER_ID;
} | java | {
"resource": ""
} |
q172034 | RecordingPos.getRecordingId | test | public static long getRecordingId(final CountersReader countersReader, final int counterId)
{
// Read the recording id from an allocated recording-position counter's key;
// NULL_RECORDING_ID when the counter is unallocated or of another type.
final DirectBuffer buffer = countersReader.metaDataBuffer();
if (countersReader.getCounterState(counterId) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(counterId);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECORDING_POSITION_TYPE_ID)
{
return buffer.getLong(recordOffset + KEY_OFFSET + RECORDING_ID_OFFSET);
}
}
return NULL_RECORDING_ID;
} | java | {
"resource": ""
} |
q172035 | RecordingPos.isActive | test | public static boolean isActive(final CountersReader countersReader, final int counterId, final long recordingId)
{
// True only when the counter is allocated, is a recording-position counter,
// and its key carries the expected recording id.
final DirectBuffer buffer = countersReader.metaDataBuffer();
if (countersReader.getCounterState(counterId) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(counterId);
return
buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECORDING_POSITION_TYPE_ID &&
buffer.getLong(recordOffset + KEY_OFFSET + RECORDING_ID_OFFSET) == recordingId;
}
return false;
} | java | {
"resource": ""
} |
q172036 | StatusMessageFlyweight.applicationSpecificFeedback | test | public StatusMessageFlyweight applicationSpecificFeedback(final byte[] source, final int offset, final int length)
{
// Append the feedback bytes after the fixed header and grow the frame
// length to cover them; fluent return for chaining.
frameLength(HEADER_LENGTH + length);
putBytes(APP_SPECIFIC_FEEDBACK_FIELD_OFFSET, source, offset, length);
return this;
} | java | {
"resource": ""
} |
q172037 | RecordingLog.reload | test | public void reload()
{
// Rebuild the in-memory entry list and term-id index from the log file,
// reading in byteBuffer-sized chunks until EOF.
entries.clear();
indexByLeadershipTermIdMap.clear();
indexByLeadershipTermIdMap.compact();
nextEntryIndex = 0;
byteBuffer.clear();
try
{
while (true)
{
final int bytes = fileChannel.read(byteBuffer);
// Buffer full: parse the accumulated entries, then reuse the buffer.
if (byteBuffer.remaining() == 0)
{
byteBuffer.flip();
captureEntriesFromBuffer(byteBuffer, buffer, entries);
byteBuffer.clear();
}
// EOF: parse any partial chunk still buffered, then stop.
if (-1 == bytes)
{
if (byteBuffer.position() > 0)
{
byteBuffer.flip();
captureEntriesFromBuffer(byteBuffer, buffer, entries);
byteBuffer.clear();
}
break;
}
}
}
catch (final IOException ex)
{
// Propagate as an unchecked exception without wrapping.
LangUtil.rethrowUnchecked(ex);
}
} | java | {
"resource": ""
} |
q172038 | RecordingLog.findLastTerm | test | public Entry findLastTerm()
{
// Walk backwards so the most recent TERM entry wins; null when none exist.
int i = entries.size();
while (--i >= 0)
{
final Entry candidate = entries.get(i);
if (candidate.type == ENTRY_TYPE_TERM)
{
return candidate;
}
}
return null;
} | java | {
"resource": ""
} |
q172039 | RecordingLog.createRecoveryPlan | test | public RecoveryPlan createRecoveryPlan(final AeronArchive archive, final int serviceCount)
{
// Build a recovery plan from the log: gather snapshots and log segments,
// seed positions from the first snapshot (if any), then let the first log
// entry override them since replaying the log supersedes the snapshot view.
final ArrayList<Snapshot> snapshots = new ArrayList<>();
final ArrayList<Log> logs = new ArrayList<>();
planRecovery(snapshots, logs, entries, archive, serviceCount);
long lastLeadershipTermId = NULL_VALUE;
long lastTermBaseLogPosition = 0;
long committedLogPosition = -1;
long appendedLogPosition = 0;
final int snapshotStepsSize = snapshots.size();
if (snapshotStepsSize > 0)
{
final Snapshot snapshot = snapshots.get(0);
lastLeadershipTermId = snapshot.leadershipTermId;
lastTermBaseLogPosition = snapshot.termBaseLogPosition;
appendedLogPosition = snapshot.logPosition;
committedLogPosition = snapshot.logPosition;
}
if (!logs.isEmpty())
{
final Log log = logs.get(0);
lastLeadershipTermId = log.leadershipTermId;
lastTermBaseLogPosition = log.termBaseLogPosition;
appendedLogPosition = log.stopPosition;
committedLogPosition = log.logPosition;
}
return new RecoveryPlan(
lastLeadershipTermId,
lastTermBaseLogPosition,
appendedLogPosition,
committedLogPosition,
snapshots,
logs);
} | java | {
"resource": ""
} |
q172040 | RecordingLog.createRecoveryPlan | test | public static RecoveryPlan createRecoveryPlan(final ArrayList<RecordingLog.Snapshot> snapshots)
{
// Snapshot-only variant: positions come from the first snapshot (if any)
// and the plan carries an empty log-segment list.
long lastLeadershipTermId = NULL_VALUE;
long lastTermBaseLogPosition = 0;
long committedLogPosition = -1;
long appendedLogPosition = 0;
final int snapshotStepsSize = snapshots.size();
if (snapshotStepsSize > 0)
{
final Snapshot snapshot = snapshots.get(0);
lastLeadershipTermId = snapshot.leadershipTermId;
lastTermBaseLogPosition = snapshot.termBaseLogPosition;
appendedLogPosition = snapshot.logPosition;
committedLogPosition = snapshot.logPosition;
}
return new RecoveryPlan(
lastLeadershipTermId,
lastTermBaseLogPosition,
appendedLogPosition,
committedLogPosition,
snapshots,
new ArrayList<>());
} | java | {
"resource": ""
} |
q172041 | RecordingLog.appendTerm | test | public void appendTerm(
final long recordingId, final long leadershipTermId, final long termBaseLogPosition, final long timestamp)
{
// Append a TERM entry after validating that leadership term ids only move
// forward relative to the last non-tombstoned entry, and index it by term id.
final int size = entries.size();
if (size > 0)
{
final Entry lastEntry = entries.get(size - 1);
// type == NULL_VALUE marks a tombstoned entry, which is skipped for the check.
if (lastEntry.type != NULL_VALUE && lastEntry.leadershipTermId >= leadershipTermId)
{
throw new ClusterException("leadershipTermId out of sequence: previous " +
lastEntry.leadershipTermId + " this " + leadershipTermId);
}
}
indexByLeadershipTermIdMap.put(leadershipTermId, nextEntryIndex);
append(
ENTRY_TYPE_TERM,
recordingId,
leadershipTermId,
termBaseLogPosition,
NULL_POSITION,
timestamp,
NULL_VALUE);
} | java | {
"resource": ""
} |
q172042 | RecordingLog.appendSnapshot | test | public void appendSnapshot(
final long recordingId,
final long leadershipTermId,
final long termBaseLogPosition,
final long logPosition,
final long timestamp,
final int serviceId)
{
// Append a SNAPSHOT entry; when the previous entry is a TERM entry it must
// belong to the same leadership term as the snapshot.
final int size = entries.size();
if (size > 0)
{
final Entry entry = entries.get(size - 1);
if (entry.type == ENTRY_TYPE_TERM && entry.leadershipTermId != leadershipTermId)
{
throw new ClusterException("leadershipTermId out of sequence: previous " +
entry.leadershipTermId + " this " + leadershipTermId);
}
}
append(
ENTRY_TYPE_SNAPSHOT,
recordingId,
leadershipTermId,
termBaseLogPosition,
logPosition,
timestamp,
serviceId);
} | java | {
"resource": ""
} |
q172043 | RecordingLog.commitLogPosition | test | public void commitLogPosition(final long leadershipTermId, final long logPosition)
{
// Persist the committed log position for the term's entry on disk, then
// mirror the change in memory by replacing the immutable Entry.
final int index = getLeadershipTermEntryIndex(leadershipTermId);
commitEntryValue(index, logPosition, LOG_POSITION_OFFSET);
final Entry entry = entries.get(index);
entries.set(index, new Entry(
entry.recordingId,
entry.leadershipTermId,
entry.termBaseLogPosition,
logPosition,
entry.timestamp,
entry.serviceId,
entry.type,
entry.entryIndex));
} | java | {
"resource": ""
} |
q172044 | RecordingLog.tombstoneEntry | test | public void tombstoneEntry(final long leadershipTermId, final int entryIndex)
{
// Mark an entry as tombstoned by overwriting its on-disk type field with
// NULL_VALUE in a single int write. TERM entries are also dropped from the
// leadership-term index. In-memory 'entries' list is left unchanged here.
int index = -1;
for (int i = 0, size = entries.size(); i < size; i++)
{
final Entry entry = entries.get(i);
if (entry.leadershipTermId == leadershipTermId && entry.entryIndex == entryIndex)
{
index = entry.entryIndex;
if (ENTRY_TYPE_TERM == entry.type)
{
indexByLeadershipTermIdMap.remove(leadershipTermId);
}
break;
}
}
if (-1 == index)
{
throw new ClusterException("unknown entry index: " + entryIndex);
}
// Stage the NULL_VALUE type field and write exactly SIZE_OF_INT bytes at
// the entry's type-field file position.
buffer.putInt(0, NULL_VALUE, LITTLE_ENDIAN);
byteBuffer.limit(SIZE_OF_INT).position(0);
final long filePosition = (index * (long)ENTRY_LENGTH) + ENTRY_TYPE_OFFSET;
try
{
if (SIZE_OF_INT != fileChannel.write(byteBuffer, filePosition))
{
throw new ClusterException("failed to write field atomically");
}
}
catch (final Exception ex)
{
LangUtil.rethrowUnchecked(ex);
}
} | java | {
"resource": ""
} |
q172045 | AeronCluster.close | test | public void close()
{
// Politely end the cluster session if the publication is still connected,
// then release resources. Subscription/publication are only closed here
// when this client owns them indirectly (ctx does not own the Aeron client).
if (null != publication && publication.isConnected())
{
closeSession();
}
if (!ctx.ownsAeronClient())
{
CloseHelper.close(subscription);
CloseHelper.close(publication);
}
ctx.close();
} | java | {
"resource": ""
} |
q172046 | AeronCluster.offer | test | public long offer(final DirectBufferVector[] vectors)
{
// NOTE: side effect on the caller's array — slot 0 is overwritten with the
// session header vector before offering to the publication.
if (headerVector != vectors[0])
{
vectors[0] = headerVector;
}
return publication.offer(vectors, null);
} | java | {
"resource": ""
} |
q172047 | LogBufferUnblocker.unblock | test | public static boolean unblock(
final UnsafeBuffer[] termBuffers,
final UnsafeBuffer logMetaDataBuffer,
final long blockedPosition,
final int termLength)
{
// Attempt to unblock a log at a blocked position: either rotate the log
// when blocked at the start of the next term, or unblock within the
// blocked term's buffer.
final int positionBitsToShift = LogBufferDescriptor.positionBitsToShift(termLength);
final int blockedTermCount = (int)(blockedPosition >> positionBitsToShift);
final int blockedOffset = (int)blockedPosition & (termLength - 1);
final int activeTermCount = activeTermCount(logMetaDataBuffer);
// Blocked exactly at the start of the term after the active one: the
// rotation itself is what is missing, so rotate and we are done.
if (activeTermCount == (blockedTermCount - 1) && blockedOffset == 0)
{
final int currentTermId = termId(rawTailVolatile(logMetaDataBuffer, indexByTermCount(activeTermCount)));
return rotateLog(logMetaDataBuffer, activeTermCount, currentTermId);
}
final int blockedIndex = indexByTermCount(blockedTermCount);
final long rawTail = rawTailVolatile(logMetaDataBuffer, blockedIndex);
final int termId = termId(rawTail);
final int tailOffset = termOffset(rawTail, termLength);
final UnsafeBuffer termBuffer = termBuffers[blockedIndex];
switch (TermUnblocker.unblock(logMetaDataBuffer, termBuffer, blockedOffset, tailOffset, termId))
{
case UNBLOCKED_TO_END:
// Unblocked to the end of the term, so the log must also be rotated.
rotateLog(logMetaDataBuffer, blockedTermCount, termId);
// fall through
case UNBLOCKED:
return true;
}
// NO_ACTION: nothing could be unblocked.
return false;
} | java | {
"resource": ""
} |
q172048 | ImageMessageFlyweight.channel | test | public String channel()
{
// Read the length-prefixed ASCII channel string. Side effect: caches the
// total encoded length (length prefix + bytes) in lengthOfChannel for
// subsequent offset calculations.
final int length = buffer.getInt(offset + CHANNEL_OFFSET);
lengthOfChannel = SIZE_OF_INT + length;
return buffer.getStringAscii(offset + CHANNEL_OFFSET, length);
} | java | {
"resource": ""
} |
q172049 | ImageMessageFlyweight.channel | test | public ImageMessageFlyweight channel(final String channel)
{
// Write the channel as a length-prefixed ASCII string and cache the number
// of bytes written in lengthOfChannel; fluent return for chaining.
lengthOfChannel = buffer.putStringAscii(offset + CHANNEL_OFFSET, channel);
return this;
} | java | {
"resource": ""
} |
q172050 | LogBufferDescriptor.checkTermLength | test | public static void checkTermLength(final int termLength)
{
// A term length must lie within [TERM_MIN_LENGTH, TERM_MAX_LENGTH] and be
// a power of two (required by the mask/shift arithmetic elsewhere).
if (termLength < TERM_MIN_LENGTH)
{
throw new IllegalStateException(
"Term length less than min length of " + TERM_MIN_LENGTH + ": length=" + termLength);
}
if (termLength > TERM_MAX_LENGTH)
{
throw new IllegalStateException(
"Term length more than max length of " + TERM_MAX_LENGTH + ": length=" + termLength);
}
if (!BitUtil.isPowerOfTwo(termLength))
{
throw new IllegalStateException("Term length not a power of 2: length=" + termLength);
}
} | java | {
"resource": ""
} |
q172051 | LogBufferDescriptor.checkPageSize | test | public static void checkPageSize(final int pageSize)
{
// A page size must lie within [PAGE_MIN_SIZE, PAGE_MAX_SIZE] and be a
// power of two; mirrors the checkTermLength validation pattern.
if (pageSize < PAGE_MIN_SIZE)
{
throw new IllegalStateException(
"Page size less than min size of " + PAGE_MIN_SIZE + ": page size=" + pageSize);
}
if (pageSize > PAGE_MAX_SIZE)
{
throw new IllegalStateException(
"Page size more than max size of " + PAGE_MAX_SIZE + ": page size=" + pageSize);
}
if (!BitUtil.isPowerOfTwo(pageSize))
{
throw new IllegalStateException("Page size not a power of 2: page size=" + pageSize);
}
} | java | {
"resource": ""
} |
q172052 | LogBufferDescriptor.casActiveTermCount | test | public static boolean casActiveTermCount(
final UnsafeBuffer metadataBuffer, final int expectedTermCount, final int updateTermCount)
{
// Compare-and-set the active term count field; true when this caller won the race.
return metadataBuffer.compareAndSetInt(LOG_ACTIVE_TERM_COUNT_OFFSET, expectedTermCount, updateTermCount);
} | java | {
"resource": ""
} |
q172053 | LogBufferDescriptor.computePosition | test | public static long computePosition(
final int activeTermId, final int termOffset, final int positionBitsToShift, final int initialTermId)
{
final long termCount = activeTermId - initialTermId; // copes with negative activeTermId on rollover
return (termCount << positionBitsToShift) + termOffset;
} | java | {
"resource": ""
} |
q172054 | LogBufferDescriptor.computeLogLength | test | public static long computeLogLength(final int termLength, final int filePageSize)
{
// Total mapped length for all partitions plus the metadata section.
// NOTE(review): the int expression (termLength * PARTITION_COUNT) relies on
// termLength being a power of two (max 512MB under the 1GB guard) to avoid
// int overflow — TODO confirm callers always validate via checkTermLength.
if (termLength < (1024 * 1024 * 1024))
{
return align((termLength * PARTITION_COUNT) + LOG_META_DATA_LENGTH, filePageSize);
}
// At 1GB terms the total exceeds int range, so compute in long and align
// only the metadata section.
return (PARTITION_COUNT * (long)termLength) + align(LOG_META_DATA_LENGTH, filePageSize);
} | java | {
"resource": ""
} |
q172055 | LogBufferDescriptor.storeDefaultFrameHeader | test | public static void storeDefaultFrameHeader(final UnsafeBuffer metadataBuffer, final DirectBuffer defaultHeader)
{
// Store the template frame header in the log metadata; it must be exactly
// HEADER_LENGTH bytes so applyDefaultHeader can copy a fixed-size block.
if (defaultHeader.capacity() != HEADER_LENGTH)
{
throw new IllegalArgumentException(
"Default header length not equal to HEADER_LENGTH: length=" + defaultHeader.capacity());
}
metadataBuffer.putInt(LOG_DEFAULT_FRAME_HEADER_LENGTH_OFFSET, HEADER_LENGTH);
metadataBuffer.putBytes(LOG_DEFAULT_FRAME_HEADER_OFFSET, defaultHeader, 0, HEADER_LENGTH);
} | java | {
"resource": ""
} |
q172056 | LogBufferDescriptor.applyDefaultHeader | test | public static void applyDefaultHeader(
final UnsafeBuffer metadataBuffer, final UnsafeBuffer termBuffer, final int termOffset)
{
// Copy the stored default frame header template into the term buffer at the offset.
termBuffer.putBytes(termOffset, metadataBuffer, LOG_DEFAULT_FRAME_HEADER_OFFSET, HEADER_LENGTH);
} | java | {
"resource": ""
} |
q172057 | LogBufferDescriptor.rotateLog | test | public static boolean rotateLog(final UnsafeBuffer metadataBuffer, final int termCount, final int termId)
{
// Rotate to the next term: prepare the next partition's tail for the new
// term id, then CAS the active term count forward. Safe under races —
// losing a CAS means another thread performed the same step.
final int nextTermId = termId + 1;
final int nextTermCount = termCount + 1;
final int nextIndex = indexByTermCount(nextTermCount);
final int expectedTermId = nextTermId - PARTITION_COUNT;
long rawTail;
do
{
rawTail = rawTail(metadataBuffer, nextIndex);
// If the partition no longer holds the expected old term id, someone
// else already prepared it; skip the tail CAS.
if (expectedTermId != termId(rawTail))
{
break;
}
}
while (!casRawTail(metadataBuffer, nextIndex, rawTail, packTail(nextTermId, 0)));
return casActiveTermCount(metadataBuffer, termCount, nextTermCount);
} | java | {
"resource": ""
} |
q172058 | LogBufferDescriptor.initialiseTailWithTermId | test | public static void initialiseTailWithTermId(
final UnsafeBuffer metadataBuffer, final int partitionIndex, final int termId)
{
// Plain (non-ordered) write of a packed raw tail with the term id and a zero offset.
metadataBuffer.putLong(TERM_TAIL_COUNTERS_OFFSET + (partitionIndex * SIZE_OF_LONG), packTail(termId, 0));
} | java | {
"resource": ""
} |
q172059 | LogBufferDescriptor.termOffset | test | public static int termOffset(final long rawTail, final long termLength)
{
final long tail = rawTail & 0xFFFF_FFFFL;
return (int)Math.min(tail, termLength);
} | java | {
"resource": ""
} |
q172060 | LogBufferDescriptor.rawTailVolatile | test | public static long rawTailVolatile(final UnsafeBuffer metadataBuffer)
{
// Volatile read of the raw tail for the currently active partition, which
// is derived from the active term count.
final int partitionIndex = indexByTermCount(activeTermCount(metadataBuffer));
return metadataBuffer.getLongVolatile(TERM_TAIL_COUNTERS_OFFSET + (SIZE_OF_LONG * partitionIndex));
} | java | {
"resource": ""
} |
q172061 | LogBufferDescriptor.casRawTail | test | public static boolean casRawTail(
final UnsafeBuffer metadataBuffer,
final int partitionIndex,
final long expectedRawTail,
final long updateRawTail)
{
// Compare-and-set the raw tail counter for the given partition.
final int index = TERM_TAIL_COUNTERS_OFFSET + (SIZE_OF_LONG * partitionIndex);
return metadataBuffer.compareAndSetLong(index, expectedRawTail, updateRawTail);
} | java | {
"resource": ""
} |
q172062 | TerminateDriverFlyweight.tokenBuffer | test | public TerminateDriverFlyweight tokenBuffer(
final DirectBuffer tokenBuffer, final int tokenOffset, final int tokenLength)
{
// Write the token length, then the token bytes only when a non-null buffer
// and positive length are supplied; fluent return for chaining.
buffer.putInt(TOKEN_LENGTH_OFFSET, tokenLength);
if (null != tokenBuffer && tokenLength > 0)
{
buffer.putBytes(tokenBufferOffset(), tokenBuffer, tokenOffset, tokenLength);
}
return this;
} | java | {
"resource": ""
} |
q172063 | RecoveryState.allocate | test | public static Counter allocate(
final Aeron aeron,
final MutableDirectBuffer tempBuffer,
final long leadershipTermId,
final long logPosition,
final long timestamp,
final boolean hasReplay,
final long... snapshotRecordingIds)
{
// Lay out the counter key: leadership term id, log position, timestamp,
// replay flag, service count, then one snapshot recording id per service.
tempBuffer.putLong(LEADERSHIP_TERM_ID_OFFSET, leadershipTermId);
tempBuffer.putLong(LOG_POSITION_OFFSET, logPosition);
tempBuffer.putLong(TIMESTAMP_OFFSET, timestamp);
tempBuffer.putInt(REPLAY_FLAG_OFFSET, hasReplay ? 1 : 0);
final int serviceCount = snapshotRecordingIds.length;
tempBuffer.putInt(SERVICE_COUNT_OFFSET, serviceCount);
final int keyLength = SNAPSHOT_RECORDING_IDS_OFFSET + (serviceCount * SIZE_OF_LONG);
if (keyLength > MAX_KEY_LENGTH)
{
throw new ClusterException(keyLength + " exceeds max key length " + MAX_KEY_LENGTH);
}
for (int i = 0; i < serviceCount; i++)
{
tempBuffer.putLong(SNAPSHOT_RECORDING_IDS_OFFSET + (i * SIZE_OF_LONG), snapshotRecordingIds[i]);
}
// The human-readable label is written after the key, aligned to an int boundary.
final int labelOffset = BitUtil.align(keyLength, SIZE_OF_INT);
int labelLength = 0;
labelLength += tempBuffer.putStringWithoutLengthAscii(labelOffset + labelLength, NAME);
// FIX: the leadership term id must be appended at labelOffset + labelLength
// like every other label fragment; the original wrote it at
// keyLength + labelLength, which corrupts the label text whenever keyLength
// is not int-aligned (labelOffset != keyLength).
labelLength += tempBuffer.putLongAscii(labelOffset + labelLength, leadershipTermId);
labelLength += tempBuffer.putStringWithoutLengthAscii(labelOffset + labelLength, " logPosition=");
labelLength += tempBuffer.putLongAscii(labelOffset + labelLength, logPosition);
labelLength += tempBuffer.putStringWithoutLengthAscii(labelOffset + labelLength, " hasReplay=" + hasReplay);
return aeron.addCounter(RECOVERY_STATE_TYPE_ID, tempBuffer, 0, keyLength, tempBuffer, labelOffset, labelLength);
} | java | {
"resource": ""
} |
/**
 * Scan all counters for the first allocated counter of the recovery-state type.
 *
 * @param counters to search through.
 * @return the matching counter id, or NULL_COUNTER_ID if none is allocated.
 */
q172064 | RecoveryState.findCounterId | test | public static int findCounterId(final CountersReader counters)
{
final DirectBuffer buffer = counters.metaDataBuffer();
for (int i = 0, size = counters.maxCounterId(); i < size; i++)
{
// Only consider slots that are currently allocated; free/reclaimed slots are skipped.
if (counters.getCounterState(i) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(i);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECOVERY_STATE_TYPE_ID)
{
return i;
}
}
}
return NULL_COUNTER_ID;
} | java | {
"resource": ""
} |
/**
 * Read the replay flag from a recovery-state counter's key.
 *
 * @param counters  to read from.
 * @param counterId of the recovery-state counter.
 * @return true if the key's replay flag is set; false if unset or the counter is
 * not an allocated recovery-state counter.
 */
q172065 | RecoveryState.hasReplay | test | public static boolean hasReplay(final CountersReader counters, final int counterId)
{
final DirectBuffer buffer = counters.metaDataBuffer();
if (counters.getCounterState(counterId) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(counterId);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECOVERY_STATE_TYPE_ID)
{
// Flag is encoded as 1 (replay) / 0 (no replay) by allocate().
return buffer.getInt(recordOffset + KEY_OFFSET + REPLAY_FLAG_OFFSET) == 1;
}
}
return false;
} | java | {
"resource": ""
} |
/**
 * Read the snapshot recording id for a given service from a recovery-state counter's key.
 *
 * @param counters  to read from.
 * @param counterId of the recovery-state counter.
 * @param serviceId index of the service whose recording id is required.
 * @return the recording id for the service snapshot.
 * @throws ClusterException if serviceId is out of range or the counter is not an
 * allocated recovery-state counter.
 */
q172066 | RecoveryState.getSnapshotRecordingId | test | public static long getSnapshotRecordingId(final CountersReader counters, final int counterId, final int serviceId)
{
final DirectBuffer buffer = counters.metaDataBuffer();
if (counters.getCounterState(counterId) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(counterId);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECOVERY_STATE_TYPE_ID)
{
// Validate against the service count stored in the key before indexing the id array.
final int serviceCount = buffer.getInt(recordOffset + KEY_OFFSET + SERVICE_COUNT_OFFSET);
if (serviceId < 0 || serviceId >= serviceCount)
{
throw new ClusterException("invalid serviceId " + serviceId + " for count of " + serviceCount);
}
return buffer.getLong(
recordOffset + KEY_OFFSET + SNAPSHOT_RECORDING_IDS_OFFSET + (serviceId * SIZE_OF_LONG));
}
}
throw new ClusterException("Active counter not found " + counterId);
} | java | {
"resource": ""
} |
q172067 | HeaderFlyweight.flagsToChars | test | public static char[] flagsToChars(final short flags)
{
final char[] chars = new char[]{ '0', '0', '0', '0', '0', '0', '0', '0' };
final int length = chars.length;
short mask = (short)(1 << (length - 1));
for (int i = 0; i < length; i++)
{
if ((flags & mask) == mask)
{
chars[i] = '1';
}
mask >>= 1;
}
return chars;
} | java | {
"resource": ""
} |
/**
 * Write the channel URI as an ASCII string at the channel field offset,
 * remembering its encoded length for subsequent length() calculations.
 *
 * @param channel the channel URI to encode.
 * @return this for a fluent API.
 */
q172068 | PublicationMessageFlyweight.channel | test | public PublicationMessageFlyweight channel(final String channel)
{
lengthOfChannel = buffer.putStringAscii(offset + CHANNEL_OFFSET, channel);
return this;
} | java | {
"resource": ""
} |
/**
 * Offer a message on this session's response publication, delegating to the cluster
 * so the session id is attached to the message.
 *
 * @param buffer containing the message payload.
 * @param offset at which the payload begins.
 * @param length of the payload in bytes.
 * @return the result of the underlying cluster offer.
 */
q172069 | ClientSession.offer | test | public long offer(final DirectBuffer buffer, final int offset, final int length)
{
return cluster.offer(id, responsePublication, buffer, offset, length);
} | java | {
"resource": ""
} |
/**
 * Wrap a region of an underlying log buffer so the claimed range can be written to.
 *
 * @param buffer to wrap.
 * @param offset at which the claimed region begins.
 * @param length of the claimed region in bytes.
 */
q172070 | BufferClaim.wrap | test | public final void wrap(final AtomicBuffer buffer, final int offset, final int length)
{
this.buffer.wrap(buffer, offset, length);
} | java | {
"resource": ""
} |
q172071 | BufferClaim.commit | test | public final void commit()
{
int frameLength = buffer.capacity();
if (ByteOrder.nativeOrder() != LITTLE_ENDIAN)
{
frameLength = Integer.reverseBytes(frameLength);
}
buffer.putIntOrdered(FRAME_LENGTH_FIELD_OFFSET, frameLength);
} | java | {
"resource": ""
} |
q172072 | BufferClaim.abort | test | public final void abort()
{
int frameLength = buffer.capacity();
if (ByteOrder.nativeOrder() != LITTLE_ENDIAN)
{
frameLength = Integer.reverseBytes(frameLength);
}
buffer.putShort(TYPE_FIELD_OFFSET, (short)HDR_TYPE_PAD, LITTLE_ENDIAN);
buffer.putIntOrdered(FRAME_LENGTH_FIELD_OFFSET, frameLength);
} | java | {
"resource": ""
} |
/**
 * Entry point for running a standalone Media Driver. Loads any properties files
 * given as arguments, launches the driver, then blocks until a shutdown signal
 * is received, at which point the try-with-resources closes the driver.
 *
 * @param args paths of properties files to load before launch.
 */
q172073 | MediaDriver.main | test | public static void main(final String[] args)
{
loadPropertiesFiles(args);
final ShutdownSignalBarrier barrier = new ShutdownSignalBarrier();
final MediaDriver.Context ctx = new MediaDriver.Context();
// Signalling the barrier from the termination hook lets an in-process shutdown request unblock main.
ctx.terminationHook(barrier::signal);
try (MediaDriver ignore = MediaDriver.launch(ctx))
{
barrier.await();
System.out.println("Shutdown Driver...");
}
} | java | {
"resource": ""
} |
/**
 * Shut down the driver by closing all agent runners and the shared invoker,
 * then restore the Windows high-resolution timer state if this driver enabled it.
 * CloseHelper.close tolerates null runners for threading modes that did not create them.
 */
q172074 | MediaDriver.close | test | public void close()
{
CloseHelper.close(sharedRunner);
CloseHelper.close(sharedNetworkRunner);
CloseHelper.close(receiverRunner);
CloseHelper.close(senderRunner);
CloseHelper.close(conductorRunner);
CloseHelper.close(sharedInvoker);
if (ctx.useWindowsHighResTimer() && SystemUtil.osName().startsWith("win"))
{
// Only disable the timer if it was not already enabled before this driver started.
if (!wasHighResTimerEnabled)
{
HighResolutionTimer.disable();
}
}
} | java | {
"resource": ""
} |
/**
 * Calculate the stream position at the end of the current frame, i.e. the position
 * of the next frame, by aligning past the current frame within its term.
 *
 * @return the position after the current frame.
 */
q172075 | Header.position | test | public final long position()
{
// Frames are aligned to FRAME_ALIGNMENT, so the next frame starts at the aligned end offset.
final int resultingOffset = BitUtil.align(termOffset() + frameLength(), FRAME_ALIGNMENT);
return computePosition(termId(), resultingOffset, positionBitsToShift, initialTermId);
} | java | {
"resource": ""
} |
/**
 * Allocate a heartbeat counter for a clustered service, encoding the service id in
 * the counter key and appending it to the label.
 *
 * @param aeron      client used to allocate the counter.
 * @param tempBuffer scratch buffer in which the key and label are encoded.
 * @param serviceId  of the service the counter belongs to.
 * @return the allocated counter.
 */
q172076 | ServiceHeartbeat.allocate | test | public static Counter allocate(
final Aeron aeron,
final MutableDirectBuffer tempBuffer,
final int serviceId)
{
tempBuffer.putInt(SERVICE_ID_OFFSET, serviceId);
// Label is placed after the key, aligned to an int boundary.
final int labelOffset = BitUtil.align(KEY_LENGTH, SIZE_OF_INT);
int labelLength = 0;
labelLength += tempBuffer.putStringWithoutLengthAscii(labelOffset + labelLength, NAME);
labelLength += tempBuffer.putIntAscii(labelOffset + labelLength, serviceId);
return aeron.addCounter(
SERVICE_HEARTBEAT_TYPE_ID, tempBuffer, 0, KEY_LENGTH, tempBuffer, labelOffset, labelLength);
} | java | {
"resource": ""
} |
q172077 | ServiceHeartbeat.findCounterId | test | public static int findCounterId(final CountersReader counters, final int serviceId)
{
final DirectBuffer buffer = counters.metaDataBuffer();
for (int i = 0, size = counters.maxCounterId(); i < size; i++)
{
if (counters.getCounterState(i) == RECORD_ALLOCATED)
{
final int recordOffset = CountersReader.metaDataOffset(i);
if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == SERVICE_HEARTBEAT_TYPE_ID &&
buffer.getInt(recordOffset + KEY_OFFSET + SERVICE_ID_OFFSET) == serviceId)
{
return i;
}
}
}
return NULL_COUNTER_ID;
} | java | {
"resource": ""
} |
/**
 * Take a snapshot of stream backlogs by iterating all counters and grouping the
 * position/limit counters by (sessionId, streamId, channel). For each relevant
 * counter type the value is recorded against the publisher, sender, receiver or
 * subscriber side of the stream.
 *
 * @return a map of composite stream key to its accumulated backlog state.
 */
q172078 | BacklogStat.snapshot | test | public Map<StreamCompositeKey, StreamBacklog> snapshot()
{
final Map<StreamCompositeKey, StreamBacklog> streams = new HashMap<>();
counters.forEach(
(counterId, typeId, keyBuffer, label) ->
{
// Pre-filter on the counter type ids handled by the switch below.
// NOTE(review): the range check assumes the limit/position type ids are contiguous
// between PUBLISHER_LIMIT_TYPE_ID and RECEIVER_POS_TYPE_ID - confirm against the constants.
if ((typeId >= PUBLISHER_LIMIT_TYPE_ID && typeId <= RECEIVER_POS_TYPE_ID) ||
typeId == SENDER_LIMIT_TYPE_ID || typeId == PER_IMAGE_TYPE_ID || typeId == PUBLISHER_POS_TYPE_ID)
{
// All these counter types share the same key layout: sessionId, streamId, channel.
final StreamCompositeKey key = new StreamCompositeKey(
keyBuffer.getInt(SESSION_ID_OFFSET),
keyBuffer.getInt(STREAM_ID_OFFSET),
keyBuffer.getStringAscii(CHANNEL_OFFSET));
final StreamBacklog streamBacklog = streams.computeIfAbsent(key, (ignore) -> new StreamBacklog());
final long registrationId = keyBuffer.getLong(REGISTRATION_ID_OFFSET);
final long value = counters.getCounterValue(counterId);
switch (typeId)
{
case PublisherLimit.PUBLISHER_LIMIT_TYPE_ID:
streamBacklog.createPublisherIfAbsent().registrationId(registrationId);
streamBacklog.createPublisherIfAbsent().limit(value);
break;
case PublisherPos.PUBLISHER_POS_TYPE_ID:
streamBacklog.createPublisherIfAbsent().registrationId(registrationId);
streamBacklog.createPublisherIfAbsent().position(value);
break;
case SenderPos.SENDER_POSITION_TYPE_ID:
streamBacklog.createSenderIfAbsent().registrationId(registrationId);
streamBacklog.createSenderIfAbsent().position(value);
break;
case SenderLimit.SENDER_LIMIT_TYPE_ID:
streamBacklog.createSenderIfAbsent().registrationId(registrationId);
streamBacklog.createSenderIfAbsent().limit(value);
break;
case ReceiverHwm.RECEIVER_HWM_TYPE_ID:
streamBacklog.createReceiverIfAbsent().registrationId(registrationId);
streamBacklog.createReceiverIfAbsent().highWaterMark(value);
break;
case ReceiverPos.RECEIVER_POS_TYPE_ID:
streamBacklog.createReceiverIfAbsent().registrationId(registrationId);
streamBacklog.createReceiverIfAbsent().position(value);
break;
case SubscriberPos.SUBSCRIBER_POSITION_TYPE_ID:
// Multiple subscribers per stream are keyed by their registration id.
streamBacklog.subscriberBacklogs().put(registrationId, new Subscriber(value));
break;
}
}
});
return streams;
} | java | {
"resource": ""
} |
/**
 * Allocate a heartbeat status counter backed by the counters manager values buffer.
 *
 * @param tempBuffer      scratch buffer used while encoding the counter metadata.
 * @param name            human readable name for the counter label.
 * @param typeId          type id for the counter.
 * @param countersManager manager from which the counter slot is allocated.
 * @param registrationId  registration id to associate with the counter.
 * @return a new AtomicCounter wrapping the allocated slot.
 */
q172079 | HeartbeatStatus.allocate | test | public static AtomicCounter allocate(
final MutableDirectBuffer tempBuffer,
final String name,
final int typeId,
final CountersManager countersManager,
final long registrationId)
{
return new AtomicCounter(
countersManager.valuesBuffer(),
allocateCounterId(tempBuffer, name, typeId, countersManager, registrationId),
countersManager);
} | java | {
"resource": ""
} |
/**
 * Close the context, unmapping the CnC file in addition to the superclass cleanup.
 * The field is nulled before unmapping so a second close() is a safe no-op for the mapping.
 */
q172080 | Context.close | test | public void close()
{
final MappedByteBuffer cncByteBuffer = this.cncByteBuffer;
this.cncByteBuffer = null;
IoUtil.unmap(cncByteBuffer);
super.close();
} | java | {
"resource": ""
} |
/**
 * Unpack a decoded recording descriptor and dispatch each field to the consumer callback.
 * Field order mirrors the RecordingDescriptorConsumer.onRecordingDescriptor signature.
 *
 * @param decoder  positioned over a recording descriptor message.
 * @param consumer to receive the unpacked descriptor fields.
 */
q172081 | ControlResponseAdapter.dispatchDescriptor | test | public static void dispatchDescriptor(
final RecordingDescriptorDecoder decoder, final RecordingDescriptorConsumer consumer)
{
consumer.onRecordingDescriptor(
decoder.controlSessionId(),
decoder.correlationId(),
decoder.recordingId(),
decoder.startTimestamp(),
decoder.stopTimestamp(),
decoder.startPosition(),
decoder.stopPosition(),
decoder.initialTermId(),
decoder.segmentFileLength(),
decoder.termBufferLength(),
decoder.mtuLength(),
decoder.sessionId(),
decoder.streamId(),
decoder.strippedChannel(),
decoder.originalChannel(),
decoder.sourceIdentity());
} | java | {
"resource": ""
} |
/**
 * Allocate a new slab large enough for at least minimumSize bytes and make it current.
 * Slab sizes grow geometrically (doubling) from initialSlabSize, capping growth at
 * maxCapacityHint / 5 once usage approaches the target capacity.
 *
 * @param minimumSize the minimum number of bytes the new slab must hold.
 */
q172082 | CapacityByteArrayOutputStream.addSlab | test | private void addSlab(int minimumSize) {
int nextSlabSize;
if (bytesUsed == 0) {
nextSlabSize = initialSlabSize;
} else if (bytesUsed > maxCapacityHint / 5) {
// to avoid an overhead of up to twice the needed size, we get linear when approaching target page size
nextSlabSize = maxCapacityHint / 5;
} else {
// double the size every time
nextSlabSize = bytesUsed;
}
// A single oversized value can require more than the computed slab size.
if (nextSlabSize < minimumSize) {
LOG.debug("slab size {} too small for value of size {}. Bumping up slab size", nextSlabSize, minimumSize);
nextSlabSize = minimumSize;
}
LOG.debug("used {} slabs, adding new slab of size {}", slabs.size(), nextSlabSize);
this.currentSlab = allocator.allocate(nextSlabSize);
this.slabs.add(currentSlab);
this.bytesAllocated += nextSlabSize;
this.currentSlabIndex = 0;
} | java | {
"resource": ""
} |
/**
 * Overwrite a single byte at an absolute index within the already-written region
 * by walking the slab list until the slab containing the index is found.
 *
 * @param index absolute byte index, must be less than the bytes written so far.
 * @param value the byte to store at that index.
 */
q172083 | CapacityByteArrayOutputStream.setByte | test | public void setByte(long index, byte value) {
checkArgument(index < bytesUsed, "Index: " + index + " is >= the current size of: " + bytesUsed);
long seen = 0;
for (int i = 0; i < slabs.size(); i++) {
ByteBuffer slab = slabs.get(i);
if (index < seen + slab.limit()) {
// ok found index
slab.put((int)(index-seen), value);
break;
}
seen += slab.limit();
}
} | java | {
"resource": ""
} |
/**
 * Append one page's statistics to the column index under construction. Pages with at
 * least one non-null value contribute min/max entries and a page index; all-null pages
 * are recorded as null pages only. The null count is tracked for every page.
 *
 * @param stats the statistics of the page being added.
 */
q172084 | ColumnIndexBuilder.add | test | public void add(Statistics<?> stats) {
if (stats.hasNonNullValue()) {
nullPages.add(false);
Object min = stats.genericGetMin();
Object max = stats.genericGetMax();
addMinMax(min, max);
pageIndexes.add(nextPageIndex);
// Track the accumulated size of the min/max values stored in the index.
minMaxSize += sizeOf(min);
minMaxSize += sizeOf(max);
} else {
nullPages.add(true);
}
nullCounts.add(stats.getNumNulls());
++nextPageIndex;
} | java | {
"resource": ""
} |
/**
 * Calculate the row ranges that might match the filter using column indexes.
 * Unsupported filter kinds (unbound record filters, no-op filters) and missing
 * offset indexes fall back to a single range covering all rows, so this is
 * always safe to use as a pre-filter.
 *
 * @param filter           the filter to evaluate.
 * @param columnIndexStore provides column/offset indexes for the row group.
 * @param paths            the column paths in the projection.
 * @param rowCount         total row count of the row group.
 * @return the (possibly complete) set of row ranges to read.
 */
q172085 | ColumnIndexFilter.calculateRowRanges | test | public static RowRanges calculateRowRanges(FilterCompat.Filter filter, ColumnIndexStore columnIndexStore,
Set<ColumnPath> paths, long rowCount) {
return filter.accept(new FilterCompat.Visitor<RowRanges>() {
@Override
public RowRanges visit(FilterPredicateCompat filterPredicateCompat) {
try {
return filterPredicateCompat.getFilterPredicate()
.accept(new ColumnIndexFilter(columnIndexStore, paths, rowCount));
} catch (MissingOffsetIndexException e) {
// Cannot filter without an offset index: log and read everything.
LOGGER.info(e.getMessage());
return RowRanges.createSingle(rowCount);
}
}
@Override
public RowRanges visit(UnboundRecordFilterCompat unboundRecordFilterCompat) {
return RowRanges.createSingle(rowCount);
}
@Override
public RowRanges visit(NoOpFilter noOpFilter) {
return RowRanges.createSingle(rowCount);
}
});
} | java | {
"resource": ""
} |
/**
 * Convert a Hive list type into a Parquet LIST group: a wrapper group containing a
 * repeated group named after the Hive array convention with the converted element type.
 *
 * @param name     the field name for the resulting group.
 * @param typeInfo the Hive list type to convert.
 * @return the Parquet group type representing the list.
 */
q172086 | HiveSchemaConverter.convertArrayType | test | private static GroupType convertArrayType(final String name, final ListTypeInfo typeInfo) {
final TypeInfo subType = typeInfo.getListElementTypeInfo();
return listWrapper(name, listType(), new GroupType(Repetition.REPEATED,
ParquetHiveSerDe.ARRAY.toString(), convertType("array_element", subType)));
} | java | {
"resource": ""
} |
/**
 * Convert a Hive struct type into an optional Parquet group whose children are the
 * converted struct fields in declaration order.
 *
 * @param name     the field name for the resulting group.
 * @param typeInfo the Hive struct type to convert.
 * @return the Parquet group type representing the struct.
 */
q172087 | HiveSchemaConverter.convertStructType | test | private static GroupType convertStructType(final String name, final StructTypeInfo typeInfo) {
final List<String> columnNames = typeInfo.getAllStructFieldNames();
final List<TypeInfo> columnTypes = typeInfo.getAllStructFieldTypeInfos();
return new GroupType(Repetition.OPTIONAL, name, convertTypes(columnNames, columnTypes));
} | java | {
"resource": ""
} |
q172088 | GlobParser.annotateMessage | test | private static String annotateMessage(String message, int pos) {
StringBuilder sb = new StringBuilder(message);
sb.append('\n');
for (int i = 0; i < pos; i++) {
sb.append('-');
}
sb.append('^');
return sb.toString();
} | java | {
"resource": ""
} |
/**
 * Finalize the in-progress bit-packed run, if any, by patching its single-byte header
 * back into the output stream at the position reserved when the run started, then
 * resetting the run-tracking state.
 */
q172089 | RunLengthBitPackingHybridEncoder.endPreviousBitPackedRun | test | private void endPreviousBitPackedRun() {
if (bitPackedRunHeaderPointer == -1) {
// we're not currently in a bit-packed-run
return;
}
// create bit-packed-header, which needs to fit in 1 byte
// Low bit 1 marks a bit-packed (vs RLE) run; upper bits carry the group count.
byte bitPackHeader = (byte) ((bitPackedGroupCount << 1) | 1);
// update this byte
baos.setByte(bitPackedRunHeaderPointer, bitPackHeader);
// mark that this run is over
bitPackedRunHeaderPointer = -1;
// reset the number of groups
bitPackedGroupCount = 0;
} | java | {
"resource": ""
} |
/**
 * Copy one complete struct from the input protocol to the output protocol.
 *
 * @param in  source protocol positioned at the start of a struct.
 * @param out destination protocol.
 * @throws TException if either protocol fails.
 */
q172090 | ProtocolReadToWrite.readOne | test | @Override
public void readOne(TProtocol in, TProtocol out) throws TException {
readOneStruct(in, out);
} | java | {
"resource": ""
} |
/**
 * Read footers for the given path status without skipping row groups.
 *
 * @param configuration Hadoop configuration to use.
 * @param pathStatus    status of the file or directory to read footers from.
 * @return the footers found.
 * @throws IOException if the footers cannot be read.
 * @deprecated retained for backwards compatibility; delegates to the variant taking skipRowGroups.
 */
q172091 | ParquetFileReader.readFooters | test | @Deprecated
public static List<Footer> readFooters(Configuration configuration, FileStatus pathStatus) throws IOException {
return readFooters(configuration, pathStatus, false);
} | java | {
"resource": ""
} |
/**
 * Read a _metadata summary file and expand it into per-file footers relative to the
 * summary file's parent directory.
 *
 * @param configuration Hadoop configuration to use.
 * @param summaryStatus status of the summary file.
 * @return the footers described by the summary file.
 * @throws IOException if the summary file cannot be read.
 * @deprecated summary files are a legacy mechanism.
 */
q172092 | ParquetFileReader.readSummaryFile | test | @Deprecated
public static List<Footer> readSummaryFile(Configuration configuration, FileStatus summaryStatus) throws IOException {
final Path parent = summaryStatus.getPath().getParent();
ParquetMetadata mergedFooters = readFooter(configuration, summaryStatus, filter(false));
return footersFromSummaryFile(parent, mergedFooters);
} | java | {
"resource": ""
} |
/**
 * Read the footer of a Parquet file, building read options appropriate to the input:
 * Hadoop-backed inputs carry their configuration through, other inputs use defaults.
 * The stream is opened and closed within this call.
 *
 * @param file   the input file to read the footer from.
 * @param filter which parts of the metadata to read.
 * @return the parsed footer metadata.
 * @throws IOException if the file cannot be read.
 * @deprecated prefer the non-static reader API.
 */
q172093 | ParquetFileReader.readFooter | test | @Deprecated
public static final ParquetMetadata readFooter(InputFile file, MetadataFilter filter) throws IOException {
ParquetReadOptions options;
if (file instanceof HadoopInputFile) {
options = HadoopReadOptions.builder(((HadoopInputFile) file).getConfiguration())
.withMetadataFilter(filter).build();
} else {
options = ParquetReadOptions.builder().withMetadataFilter(filter).build();
}
try (SeekableInputStream in = file.newStream()) {
return readFooter(file, options, in);
}
} | java | {
"resource": ""
} |
/**
 * Read the next row group in its entirety: the column chunks of the projected columns
 * are grouped into consecutive byte ranges so each range can be read in one scan, then
 * each chunk's pages are decoded into the returned page store.
 *
 * @return the page store for the next row group, or null when all row groups are consumed.
 * @throws IOException if the chunks cannot be read.
 */
q172094 | ParquetFileReader.readNextRowGroup | test | public PageReadStore readNextRowGroup() throws IOException {
if (currentBlock == blocks.size()) {
return null;
}
BlockMetaData block = blocks.get(currentBlock);
if (block.getRowCount() == 0) {
throw new RuntimeException("Illegal row group of 0 rows");
}
this.currentRowGroup = new ColumnChunkPageReadStore(block.getRowCount());
// prepare the list of consecutive parts to read them in one scan
List<ConsecutivePartList> allParts = new ArrayList<ConsecutivePartList>();
ConsecutivePartList currentParts = null;
for (ColumnChunkMetaData mc : block.getColumns()) {
ColumnPath pathKey = mc.getPath();
BenchmarkCounter.incrementTotalBytes(mc.getTotalSize());
// Only columns present in the projection (paths map) are read.
ColumnDescriptor columnDescriptor = paths.get(pathKey);
if (columnDescriptor != null) {
long startingPos = mc.getStartingPos();
// first part or not consecutive => new list
if (currentParts == null || currentParts.endPos() != startingPos) {
currentParts = new ConsecutivePartList(startingPos);
allParts.add(currentParts);
}
currentParts.addChunk(new ChunkDescriptor(columnDescriptor, mc, startingPos, (int)mc.getTotalSize()));
}
}
// actually read all the chunks
ChunkListBuilder builder = new ChunkListBuilder();
for (ConsecutivePartList consecutiveChunks : allParts) {
consecutiveChunks.readAll(f, builder);
}
for (Chunk chunk : builder.build()) {
currentRowGroup.addColumn(chunk.descriptor.col, chunk.readAllPages());
}
// avoid re-reading bytes the dictionary reader is used after this call
if (nextDictionaryReader != null) {
nextDictionaryReader.setRowGroup(currentRowGroup);
}
advanceToNextBlock();
return currentRowGroup;
} | java | {
"resource": ""
} |
/**
 * Read the next row group, using column indexes to skip pages that cannot match the
 * filter. Falls back to {@link #readNextRowGroup()} when column-index filtering is
 * disabled or every row matches, and skips the row group entirely when no row matches.
 *
 * @return the (possibly filtered) page store for the next row group, or null when done.
 * @throws IOException if the chunks cannot be read.
 */
q172095 | ParquetFileReader.readNextFilteredRowGroup | test | public PageReadStore readNextFilteredRowGroup() throws IOException {
if (currentBlock == blocks.size()) {
return null;
}
if (!options.useColumnIndexFilter()) {
return readNextRowGroup();
}
BlockMetaData block = blocks.get(currentBlock);
if (block.getRowCount() == 0) {
throw new RuntimeException("Illegal row group of 0 rows");
}
ColumnIndexStore ciStore = getColumnIndexStore(currentBlock);
RowRanges rowRanges = getRowRanges(currentBlock);
long rowCount = rowRanges.rowCount();
if (rowCount == 0) {
// There are no matching rows -> skipping this row-group
advanceToNextBlock();
return readNextFilteredRowGroup();
}
if (rowCount == block.getRowCount()) {
// All rows are matching -> fall back to the non-filtering path
return readNextRowGroup();
}
this.currentRowGroup = new ColumnChunkPageReadStore(rowRanges);
// prepare the list of consecutive parts to read them in one scan
ChunkListBuilder builder = new ChunkListBuilder();
List<ConsecutivePartList> allParts = new ArrayList<ConsecutivePartList>();
ConsecutivePartList currentParts = null;
for (ColumnChunkMetaData mc : block.getColumns()) {
ColumnPath pathKey = mc.getPath();
ColumnDescriptor columnDescriptor = paths.get(pathKey);
if (columnDescriptor != null) {
// The offset index is filtered down to the pages overlapping the matching row ranges.
OffsetIndex offsetIndex = ciStore.getOffsetIndex(mc.getPath());
OffsetIndex filteredOffsetIndex = filterOffsetIndex(offsetIndex, rowRanges,
block.getRowCount());
for (OffsetRange range : calculateOffsetRanges(filteredOffsetIndex, mc, offsetIndex.getOffset(0))) {
BenchmarkCounter.incrementTotalBytes(range.getLength());
long startingPos = range.getOffset();
// first part or not consecutive => new list
if (currentParts == null || currentParts.endPos() != startingPos) {
currentParts = new ConsecutivePartList(startingPos);
allParts.add(currentParts);
}
ChunkDescriptor chunkDescriptor = new ChunkDescriptor(columnDescriptor, mc, startingPos,
(int) range.getLength());
currentParts.addChunk(chunkDescriptor);
builder.setOffsetIndex(chunkDescriptor, filteredOffsetIndex);
}
}
}
// actually read all the chunks
for (ConsecutivePartList consecutiveChunks : allParts) {
consecutiveChunks.readAll(f, builder);
}
for (Chunk chunk : builder.build()) {
currentRowGroup.addColumn(chunk.descriptor.col, chunk.readAllPages());
}
// avoid re-reading bytes the dictionary reader is used after this call
if (nextDictionaryReader != null) {
nextDictionaryReader.setRowGroup(currentRowGroup);
}
advanceToNextBlock();
return currentRowGroup;
} | java | {
"resource": ""
} |
/**
 * Read and decompress the dictionary page for a column chunk, or return null if the
 * chunk does not use a dictionary encoding or no dictionary page header is found at
 * the chunk's starting position.
 *
 * @param meta metadata of the column chunk.
 * @return the uncompressed dictionary page, or null.
 * @throws IOException if reading the page fails.
 */
q172096 | ParquetFileReader.readDictionary | test | DictionaryPage readDictionary(ColumnChunkMetaData meta) throws IOException {
if (!meta.getEncodings().contains(Encoding.PLAIN_DICTIONARY) &&
!meta.getEncodings().contains(Encoding.RLE_DICTIONARY)) {
return null;
}
// TODO: this should use getDictionaryPageOffset() but it isn't reliable.
if (f.getPos() != meta.getStartingPos()) {
f.seek(meta.getStartingPos());
}
PageHeader pageHeader = Util.readPageHeader(f);
if (!pageHeader.isSetDictionary_page_header()) {
return null; // TODO: should this complain?
}
DictionaryPage compressedPage = readCompressedDictionary(pageHeader, f);
BytesInputDecompressor decompressor = options.getCodecFactory().getDecompressor(meta.getCodec());
return new DictionaryPage(
decompressor.decompress(compressedPage.getBytes(), compressedPage.getUncompressedSize()),
compressedPage.getDictionarySize(),
compressedPage.getEncoding());
} | java | {
"resource": ""
} |
q172097 | MemoryManager.addWriter | test | synchronized void addWriter(InternalParquetRecordWriter writer, Long allocation) {
Long oldValue = writerList.get(writer);
if (oldValue == null) {
writerList.put(writer, allocation);
} else {
throw new IllegalArgumentException("[BUG] The Parquet Memory Manager should not add an " +
"instance of InternalParquetRecordWriter more than once. The Manager already contains " +
"the writer: " + writer);
}
updateAllocation();
} | java | {
"resource": ""
} |
q172098 | MemoryManager.removeWriter | test | synchronized void removeWriter(InternalParquetRecordWriter writer) {
if (writerList.containsKey(writer)) {
writerList.remove(writer);
}
if (!writerList.isEmpty()) {
updateAllocation();
}
} | java | {
"resource": ""
} |
/**
 * Rebalance writer allocations against the total memory pool. When the sum of
 * requested allocations exceeds the pool, every writer's row group size threshold
 * is scaled down proportionally; writers whose scaled size would fall below the
 * configured minimum cause a hard failure.
 */
q172099 | MemoryManager.updateAllocation | test | private void updateAllocation() {
long totalAllocations = 0;
for (Long allocation : writerList.values()) {
totalAllocations += allocation;
}
if (totalAllocations <= totalMemoryPool) {
scale = 1.0;
} else {
scale = (double) totalMemoryPool / totalAllocations;
LOG.warn(String.format(
"Total allocation exceeds %.2f%% (%,d bytes) of heap memory\n" +
"Scaling row group sizes to %.2f%% for %d writers",
100*memoryPoolRatio, totalMemoryPool, 100*scale, writerList.size()));
for (Runnable callBack : callBacks.values()) {
// we do not really want to start a new thread here.
callBack.run();
}
}
// NOTE(review): maxColCount is computed but never used below - confirm whether this is dead code.
int maxColCount = 0;
for (InternalParquetRecordWriter w : writerList.keySet()) {
maxColCount = Math.max(w.getSchema().getColumns().size(), maxColCount);
}
for (Map.Entry<InternalParquetRecordWriter, Long> entry : writerList.entrySet()) {
long newSize = (long) Math.floor(entry.getValue() * scale);
if(scale < 1.0 && minMemoryAllocation > 0 && newSize < minMemoryAllocation) {
throw new ParquetRuntimeException(String.format("New Memory allocation %d bytes" +
" is smaller than the minimum allocation size of %d bytes.",
newSize, minMemoryAllocation)){};
}
entry.getKey().setRowGroupSizeThreshold(newSize);
LOG.debug(String.format("Adjust block size from %,d to %,d for writer: %s",
entry.getValue(), newSize, entry.getKey()));
}
} | java | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.