Repository: cassandra Updated Branches: refs/heads/trunk 8047d5a76 -> f13bebe52
Add 3.0 metadata to sstablemetadata output Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/f13bebe5 Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/f13bebe5 Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/f13bebe5 Branch: refs/heads/trunk Commit: f13bebe52e5a1f11635e9eb4f300b2a36d936d24 Parents: 8047d5a Author: Shogo Hoshii <[email protected]> Authored: Wed Jan 6 14:45:17 2016 -0600 Committer: Yuki Morishita <[email protected]> Committed: Thu Jan 7 09:29:42 2016 -0600 ---------------------------------------------------------------------- CHANGES.txt | 1 + .../cassandra/db/SerializationHeader.java | 25 +++++++++ .../cassandra/tools/SSTableMetadataViewer.java | 55 ++++++++++++++++++++ 3 files changed, 81 insertions(+) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/cassandra/blob/f13bebe5/CHANGES.txt ---------------------------------------------------------------------- diff --git a/CHANGES.txt b/CHANGES.txt index 8276a9a..8667edd 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,5 +1,6 @@ 3.4 * Add nodetool gettimeout and settimeout commands (CASSANDRA-10953) + * Add 3.0 metadata to sstablemetadata output (CASSANDRA-10838) 3.3 http://git-wip-us.apache.org/repos/asf/cassandra/blob/f13bebe5/src/java/org/apache/cassandra/db/SerializationHeader.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/db/SerializationHeader.java b/src/java/org/apache/cassandra/db/SerializationHeader.java index 0706d06..6aee0b6 100644 --- a/src/java/org/apache/cassandra/db/SerializationHeader.java +++ b/src/java/org/apache/cassandra/db/SerializationHeader.java @@ -361,6 +361,31 @@ public class SerializationHeader return String.format("SerializationHeader.Component[key=%s, cks=%s, statics=%s, regulars=%s, stats=%s]", keyType, clusteringTypes, staticColumns, 
regularColumns, stats); } + + public AbstractType<?> getKeyType() + { + return keyType; + } + + public List<AbstractType<?>> getClusteringTypes() + { + return clusteringTypes; + } + + public Map<ByteBuffer, AbstractType<?>> getStaticColumns() + { + return staticColumns; + } + + public Map<ByteBuffer, AbstractType<?>> getRegularColumns() + { + return regularColumns; + } + + public EncodingStats getEncodingStats() + { + return stats; + } } public static class Serializer implements IMetadataComponentSerializer<Component> http://git-wip-us.apache.org/repos/asf/cassandra/blob/f13bebe5/src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java b/src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java index 5b47004..25e6a8f 100644 --- a/src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java +++ b/src/java/org/apache/cassandra/tools/SSTableMetadataViewer.java @@ -20,13 +20,22 @@ package org.apache.cassandra.tools; import java.io.File; import java.io.IOException; import java.io.PrintStream; +import java.nio.ByteBuffer; +import java.util.Arrays; import java.util.EnumSet; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.cassandra.db.SerializationHeader; +import org.apache.cassandra.db.marshal.AbstractType; +import org.apache.cassandra.db.marshal.UTF8Type; +import org.apache.cassandra.db.rows.EncodingStats; import org.apache.cassandra.io.compress.CompressionMetadata; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.sstable.metadata.*; +import org.apache.cassandra.utils.FBUtilities; /** * Shows the contents of sstable metadata @@ -60,6 +69,7 @@ public class SSTableMetadataViewer File compressionFile = new File(descriptor.filenameFor(Component.COMPRESSION_INFO)); if 
(compressionFile.exists()) compression = CompressionMetadata.create(fname); + SerializationHeader.Component header = (SerializationHeader.Component) metadata.get(MetadataType.HEADER); out.printf("SSTable: %s%n", descriptor); if (validation != null) @@ -71,15 +81,36 @@ public class SSTableMetadataViewer { out.printf("Minimum timestamp: %s%n", stats.minTimestamp); out.printf("Maximum timestamp: %s%n", stats.maxTimestamp); + out.printf("SSTable min local deletion time: %s%n", stats.minLocalDeletionTime); out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime); out.printf("Compressor: %s%n", compression != null ? compression.compressor().getClass().getName() : "-"); if (compression != null) out.printf("Compression ratio: %s%n", stats.compressionRatio); + out.printf("TTL min: %s%n", stats.minTTL); + out.printf("TTL max: %s%n", stats.maxTTL); + if (header != null && header.getClusteringTypes().size() == stats.minClusteringValues.size()) + { + List<AbstractType<?>> clusteringTypes = header.getClusteringTypes(); + List<ByteBuffer> minClusteringValues = stats.minClusteringValues; + List<ByteBuffer> maxClusteringValues = stats.maxClusteringValues; + String[] minValues = new String[clusteringTypes.size()]; + String[] maxValues = new String[clusteringTypes.size()]; + for (int i = 0; i < clusteringTypes.size(); i++) + { + minValues[i] = clusteringTypes.get(i).getString(minClusteringValues.get(i)); + maxValues[i] = clusteringTypes.get(i).getString(maxClusteringValues.get(i)); + } + out.printf("minClusteringValues: %s%n", Arrays.toString(minValues)); + out.printf("maxClusteringValues: %s%n", Arrays.toString(maxValues)); + } out.printf("Estimated droppable tombstones: %s%n", stats.getEstimatedDroppableTombstoneRatio((int) (System.currentTimeMillis() / 1000))); out.printf("SSTable Level: %d%n", stats.sstableLevel); out.printf("Repaired at: %d%n", stats.repairedAt); out.println(stats.replayPosition); + out.printf("totalColumnsSet: %s%n", stats.totalColumnsSet); 
+ out.printf("totalRows: %s%n", stats.totalRows); out.println("Estimated tombstone drop times:"); + for (Map.Entry<Double, Long> entry : stats.estimatedTombstoneDropTime.getAsMap().entrySet()) { out.printf("%-10s:%10s%n",entry.getKey().intValue(), entry.getValue()); @@ -90,6 +121,30 @@ public class SSTableMetadataViewer { out.printf("Estimated cardinality: %s%n", compaction.cardinalityEstimator.cardinality()); } + if (header != null) + { + EncodingStats encodingStats = header.getEncodingStats(); + AbstractType<?> keyType = header.getKeyType(); + List<AbstractType<?>> clusteringTypes = header.getClusteringTypes(); + Map<ByteBuffer, AbstractType<?>> staticColumns = header.getStaticColumns(); + Map<String, String> statics = staticColumns.entrySet().stream() + .collect(Collectors.toMap( + e -> UTF8Type.instance.getString(e.getKey()), + e -> e.getValue().toString())); + Map<ByteBuffer, AbstractType<?>> regularColumns = header.getRegularColumns(); + Map<String, String> regulars = regularColumns.entrySet().stream() + .collect(Collectors.toMap( + e -> UTF8Type.instance.getString(e.getKey()), + e -> e.getValue().toString())); + + out.printf("EncodingStats minTTL: %s%n", encodingStats.minTTL); + out.printf("EncodingStats minLocalDeletionTime: %s%n", encodingStats.minLocalDeletionTime); + out.printf("EncodingStats minTimestamp: %s%n", encodingStats.minTimestamp); + out.printf("KeyType: %s%n", keyType.toString()); + out.printf("ClusteringTypes: %s%n", clusteringTypes.toString()); + out.printf("StaticColumns: {%s}%n", FBUtilities.toString(statics)); + out.printf("RegularColumns: {%s}%n", FBUtilities.toString(regulars)); + } } else {
