Merge branch 'cassandra-5.0' into trunk
mike-tr-adamson committed Dec 20, 2023
2 parents bc0c8f6 + 3b05051 commit f76f328
Showing 37 changed files with 419 additions and 1,205 deletions.
@@ -453,7 +453,7 @@ public enum CassandraRelevantProperties
SAI_SORTED_TERMS_PARTITION_BLOCK_SHIFT("cassandra.sai.sorted_terms_partition_block_shift", "4"),

SAI_TEST_BALANCED_TREE_DEBUG_ENABLED("cassandra.sai.test.balanced_tree_debug_enabled", "false"),
SAI_TEST_DISABLE_TIMEOUT("cassandra.sai.test.disable.timeout", "false"),
SAI_TEST_DISABLE_TIMEOUT("cassandra.sai.test.timeout_disabled", "false"),

/** Whether to allow the user to specify custom options to the hnsw index */
SAI_VECTOR_ALLOW_CUSTOM_PARAMETERS("cassandra.sai.vector.allow_custom_parameters", "false"),
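
The only change in this hunk is the system property name behind SAI_TEST_DISABLE_TIMEOUT, which moves from "cassandra.sai.test.disable.timeout" to "cassandra.sai.test.timeout_disabled"; the enum constant itself keeps its name. A minimal sketch of the practical effect, assuming the standard getBoolean() accessor that CassandraRelevantProperties constants expose (SaiTimeoutFlagExample is only an illustrative class name):

import org.apache.cassandra.config.CassandraRelevantProperties;

public class SaiTimeoutFlagExample
{
    public static void main(String[] args)
    {
        // Code that reads the flag through the enum constant is unaffected by the rename...
        boolean before = CassandraRelevantProperties.SAI_TEST_DISABLE_TIMEOUT.getBoolean();

        // ...but anything that sets the raw property string must switch to the new name.
        System.setProperty("cassandra.sai.test.timeout_disabled", "true");
        boolean after = CassandraRelevantProperties.SAI_TEST_DISABLE_TIMEOUT.getBoolean();

        System.out.println(before + " -> " + after);
    }
}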
@@ -32,9 +32,9 @@
import org.apache.cassandra.index.sai.disk.RowMapping;
import org.apache.cassandra.index.sai.disk.format.IndexComponent;
import org.apache.cassandra.index.sai.disk.format.IndexDescriptor;
import org.apache.cassandra.index.sai.disk.v1.bbtree.BlockBalancedTreeIterator;
import org.apache.cassandra.index.sai.disk.v1.bbtree.NumericIndexWriter;
import org.apache.cassandra.index.sai.disk.v1.segment.SegmentMetadata;
import org.apache.cassandra.index.sai.disk.v1.segment.SegmentWriter;
import org.apache.cassandra.index.sai.disk.v1.trie.LiteralIndexWriter;
import org.apache.cassandra.index.sai.memory.MemtableIndex;
import org.apache.cassandra.index.sai.memory.MemtableTermsIterator;
@@ -122,7 +122,7 @@ public void complete(Stopwatch stopwatch) throws IOException

try (MemtableTermsIterator terms = new MemtableTermsIterator(memtable.getMinTerm(), memtable.getMaxTerm(), iterator))
{
long cellCount = flush(terms, rowMapping.maxSSTableRowId);
long cellCount = flush(terms);

completeIndexFlush(cellCount, start, stopwatch);
}
@@ -137,28 +137,15 @@ public void complete(Stopwatch stopwatch) throws IOException
}
}

private long flush(MemtableTermsIterator terms, long maxSSTableRowId) throws IOException
private long flush(MemtableTermsIterator terms) throws IOException
{
long numRows;
SegmentMetadata.ComponentMetadataMap indexMetas;
SegmentWriter writer = indexTermType.isLiteral() ? new LiteralIndexWriter(indexDescriptor, indexIdentifier)
: new NumericIndexWriter(indexDescriptor,
indexIdentifier,
indexTermType.fixedSizeOf());

if (indexTermType.isLiteral())
{
try (LiteralIndexWriter writer = new LiteralIndexWriter(indexDescriptor, indexIdentifier))
{
indexMetas = writer.writeCompleteSegment(terms);
numRows = writer.getPostingsCount();
}
}
else
{
NumericIndexWriter writer = new NumericIndexWriter(indexDescriptor,
indexIdentifier,
indexTermType.fixedSizeOf(),
maxSSTableRowId);
indexMetas = writer.writeCompleteSegment(BlockBalancedTreeIterator.fromTermsIterator(terms, indexTermType));
numRows = writer.getValueCount();
}
SegmentMetadata.ComponentMetadataMap indexMetas = writer.writeCompleteSegment(terms);
long numRows = writer.getNumberOfRows();

// If no rows were written we need to delete any created column index components
// so that the index is correctly identified as being empty (only having a completion marker)
@@ -179,9 +166,9 @@ private long flush(MemtableTermsIterator terms, long maxSSTableRowId) throws IOException
terms.getMaxTerm(),
indexMetas);

try (MetadataWriter writer = new MetadataWriter(indexDescriptor.openPerIndexOutput(IndexComponent.META, indexIdentifier)))
try (MetadataWriter metadataWriter = new MetadataWriter(indexDescriptor.openPerIndexOutput(IndexComponent.META, indexIdentifier)))
{
SegmentMetadata.write(writer, Collections.singletonList(metadata));
SegmentMetadata.write(metadataWriter, Collections.singletonList(metadata));
}

return numRows;
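
Taken together, these hunks collapse the literal/numeric branch in flush(): both LiteralIndexWriter and NumericIndexWriter are now driven through a single SegmentWriter reference chosen once, and the BlockBalancedTreeIterator conversion plus the maxSSTableRowId parameter disappear from this path. A rough sketch of the minimal contract the new call sites rely on, inferred from the diff; the real SegmentWriter interface may declare more than this, and SegmentWriterSketch is only an illustrative name:

import java.io.IOException;

import org.apache.cassandra.index.sai.disk.v1.segment.SegmentMetadata;
import org.apache.cassandra.index.sai.memory.MemtableTermsIterator;

// Sketch of the contract implied by the new flush(): write a complete segment and return
// its component metadata, and report how many rows were written so the caller can detect
// an empty index and delete its components. Method names come from the diff; everything
// else here is an assumption.
public interface SegmentWriterSketch
{
    SegmentMetadata.ComponentMetadataMap writeCompleteSegment(MemtableTermsIterator terms) throws IOException;

    long getNumberOfRows();
}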
@@ -129,7 +129,7 @@ public void complete(Stopwatch stopwatch) throws IOException
if (currentBuilder != null)
{
long bytesAllocated = currentBuilder.totalBytesAllocated();
long globalBytesUsed = currentBuilder.release(index.identifier());
long globalBytesUsed = currentBuilder.release();
logger.debug(index.identifier().logMessage("Flushing final segment for SSTable {} released {}. Global segment memory usage now at {}."),
indexDescriptor.sstableDescriptor, FBUtilities.prettyPrintMemory(bytesAllocated), FBUtilities.prettyPrintMemory(globalBytesUsed));
}
@@ -160,7 +160,7 @@ public void abort(Throwable cause)
// If an exception is thrown out of any writer operation prior to successful segment
// flush, we will end up here, and we need to free up builder memory tracked by the limiter:
long allocated = currentBuilder.totalBytesAllocated();
long globalBytesUsed = currentBuilder.release(index.identifier());
long globalBytesUsed = currentBuilder.release();
logger.debug(index.identifier().logMessage("Aborting index writer for SSTable {} released {}. Global segment memory usage now at {}."),
indexDescriptor.sstableDescriptor, FBUtilities.prettyPrintMemory(allocated), FBUtilities.prettyPrintMemory(globalBytesUsed));
}
@@ -251,7 +251,7 @@ private void flushSegment() throws IOException
{
long bytesAllocated = currentBuilder.totalBytesAllocated();

SegmentMetadata segmentMetadata = currentBuilder.flush(indexDescriptor, index.identifier());
SegmentMetadata segmentMetadata = currentBuilder.flush(indexDescriptor);

long flushMillis = Math.max(1, TimeUnit.NANOSECONDS.toMillis(Clock.Global.nanoTime() - start));

@@ -273,7 +273,7 @@ private void flushSegment() throws IOException
// flush. Note that any failure that occurs before this (even in term addition) will
// actuate this column writer's abort logic from the parent SSTable-level writer, and
// that abort logic will release the current builder's memory against the limiter.
long globalBytesUsed = currentBuilder.release(index.identifier());
long globalBytesUsed = currentBuilder.release();
currentBuilder = null;
logger.debug(index.identifier().logMessage("Flushing index segment for SSTable {} released {}. Global segment memory usage now at {}."),
indexDescriptor.sstableDescriptor, FBUtilities.prettyPrintMemory(bytesAllocated), FBUtilities.prettyPrintMemory(globalBytesUsed));
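
In these hunks SegmentBuilder.flush() and release() lose their index-identifier argument while the surrounding memory accounting stays the same; that matches the new builder constructors in the next hunk, which take the index itself, so the builder can derive its identifier internally. A toy illustration of that pattern, with hypothetical names that are not Cassandra types:

// Toy illustration: a builder that remembers its index identity at construction time,
// so release() no longer needs an identifier argument. ToyBuilder is hypothetical.
final class ToyBuilder
{
    private final String indexIdentifier;
    private long bytesAllocated;

    ToyBuilder(String indexIdentifier, long bytesAllocated)
    {
        this.indexIdentifier = indexIdentifier;
        this.bytesAllocated = bytesAllocated;
    }

    long release()
    {
        long released = bytesAllocated;
        bytesAllocated = 0;
        System.out.println("Released " + released + " bytes for " + indexIdentifier);
        return released;
    }

    public static void main(String[] args)
    {
        ToyBuilder builder = new ToyBuilder("sai_idx", 4096);
        builder.release(); // no identifier passed; the builder already knows it
    }
}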
@@ -306,14 +306,8 @@ private void writeSegmentsMetadata() throws IOException

private SegmentBuilder newSegmentBuilder()
{
SegmentBuilder builder;

if (index.termType().isVector())
builder = new SegmentBuilder.VectorSegmentBuilder(index.termType(), limiter, index.indexWriterConfig());
else if (index.termType().isLiteral())
builder = new SegmentBuilder.RAMStringSegmentBuilder(index.termType(), limiter);
else
builder = new SegmentBuilder.BlockBalancedTreeSegmentBuilder(index.termType(), limiter);
SegmentBuilder builder = index.termType().isVector() ? new SegmentBuilder.VectorSegmentBuilder(index, limiter)
: new SegmentBuilder.TrieSegmentBuilder(index, limiter);

long globalBytesUsed = limiter.increment(builder.totalBytesAllocated());
logger.debug(index.identifier().logMessage("Created new segment builder while flushing SSTable {}. Global segment memory usage now at {}."),
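
newSegmentBuilder() now makes a two-way choice instead of three: vector term types get a VectorSegmentBuilder, and everything else, literal and numeric alike, goes through TrieSegmentBuilder, which is consistent with the balanced-tree imports removed earlier in this commit. A tiny sketch of that mapping; the Kind enum and selectBuilder helper are illustrative only:

// Illustrative mapping only: literal and numeric term types now share the trie-based builder.
final class BuilderSelectionSketch
{
    enum Kind { VECTOR, LITERAL, NUMERIC }

    static String selectBuilder(Kind kind)
    {
        return kind == Kind.VECTOR ? "VectorSegmentBuilder" : "TrieSegmentBuilder";
    }

    public static void main(String[] args)
    {
        for (Kind kind : Kind.values())
            System.out.println(kind + " -> " + selectBuilder(kind));
    }
}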

This file was deleted.

