Updated Branches: refs/heads/trunk be0523a24 -> 6f5d8a56f
Ninja-kill what's left from avro Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/6f5d8a56 Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/6f5d8a56 Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/6f5d8a56 Branch: refs/heads/trunk Commit: 6f5d8a56fc026fbf7173cfc364346def3806c7d5 Parents: be0523a Author: Aleksey Yeschenko <[email protected]> Authored: Fri May 31 04:04:36 2013 +0300 Committer: Aleksey Yeschenko <[email protected]> Committed: Fri May 31 04:05:07 2013 +0300 ---------------------------------------------------------------------- build.xml | 64 +---- lib/avro-1.4.0-fixes.jar | Bin 596381 -> 0 bytes lib/avro-1.4.0-sources-fixes.jar | Bin 276425 -> 0 bytes lib/licenses/avro-1.4.0.txt | 202 ------------ src/avro/internode.genavro | 131 -------- src/java/org/apache/cassandra/config/Avro.java | 254 --------------- src/java/org/apache/cassandra/db/DefsTable.java | 93 ------ .../apache/cassandra/config/CFMetaDataTest.java | 3 +- .../cassandra/config/DatabaseDescriptorTest.java | 1 - 9 files changed, 3 insertions(+), 745 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/build.xml ---------------------------------------------------------------------- diff --git a/build.xml b/build.xml index 5a2feff..97d0efd 100644 --- a/build.xml +++ b/build.xml @@ -35,7 +35,6 @@ <property name="build.src" value="${basedir}/src"/> <property name="build.src.java" value="${basedir}/src/java"/> <property name="build.src.resources" value="${basedir}/src/resources"/> - <property name="avro.src" value="${basedir}/src/avro"/> <property name="build.src.gen-java" value="${basedir}/src/gen-java"/> <property name="build.lib" value="${basedir}/lib"/> <property name="build.dir" value="${basedir}/build"/> @@ -168,7 +167,6 @@ <delete dir="${build.classes}" /> <delete dir="${cobertura.classes.dir}" /> <delete dir="${build.src.gen-java}" /> - <delete file="${build.dir}/internode.avpr" /> </target> <target depends="clean" name="cleanall"/> @@ -356,16 +354,8 @@ <dependency groupId="com.github.stephenc.high-scale-lib" artifactId="high-scale-lib" version="1.1.2"/> <dependency groupId="com.github.stephenc" artifactId="jamm" version="0.2.5"/> <dependency groupId="org.yaml" artifactId="snakeyaml" version="1.6"/> - <dependency groupId="org.apache.cassandra.deps" artifactId="avro" version="1.4.0-cassandra-1"> - <exclusion groupId="org.jboss.netty" artifactId="netty"/> - <exclusion groupId="com.thoughtworks.paranamer" artifactId="paranamer"/> - <exclusion groupId="com.thoughtworks.paranamer" artifactId="paranamer-ant"/> - <exclusion groupId="org.apache.velocity" artifactId="velocity"/> - </dependency> - <dependency groupId="org.apache.thrift" artifactId="libthrift" version="0.9.0"/> - <dependency groupId="com.thoughtworks.paranamer" artifactId="paranamer-ant" version="2.1"/> <dependency groupId="junit" artifactId="junit" version="4.6" /> <dependency groupId="commons-logging" artifactId="commons-logging" version="1.1.1"/> <dependency groupId="org.apache.rat" artifactId="apache-rat" version="0.6"> @@ -412,9 +402,6 @@ <parent groupId="org.apache.cassandra" artifactId="cassandra-parent" version="${version}"/> - <!-- FIXME: paranamer can be dropped after we're depending on avro - (since it depends on them). 
--> - <dependency groupId="com.thoughtworks.paranamer" artifactId="paranamer-ant"/> <dependency groupId="junit" artifactId="junit"/> <dependency groupId="commons-logging" artifactId="commons-logging"/> <dependency groupId="org.apache.rat" artifactId="apache-rat"/> @@ -463,7 +450,6 @@ <dependency groupId="com.googlecode.concurrentlinkedhashmap" artifactId="concurrentlinkedhashmap-lru"/> <dependency groupId="org.antlr" artifactId="antlr"/> <dependency groupId="org.slf4j" artifactId="slf4j-api"/> - <dependency groupId="org.apache.cassandra.deps" artifactId="avro"/> <dependency groupId="org.codehaus.jackson" artifactId="jackson-core-asl"/> <dependency groupId="org.codehaus.jackson" artifactId="jackson-mapper-asl"/> <dependency groupId="jline" artifactId="jline"/> @@ -573,48 +559,6 @@ </copy> </target> - <!-- - Generate avro code - --> - <target name="check-avro-generate" depends="init"> - <taskdef name="avro-protocol" classname="org.apache.avro.specific.ProtocolTask"> - <classpath refid="cassandra.classpath" /> - </taskdef> - <uptodate property="avroINProtoUpToDate" srcfile="${avro.src}/internode.genavro" - targetfile="${build.dir}/internode.avpr" /> - </target> - - <target name="avro-generate" - unless="avroINProtoUpToDate" - depends="init,check-avro-generate,maven-ant-tasks-retrieve-build" - description="Generates Java Avro classes for internal use."> - <avromacro protocolname="internode" inputfile="${avro.src}/internode.genavro" - jsondir="${build.dir}" outputdir="${build.src}"/> - </target> - - <macrodef name="avromacro"> - <attribute name="protocolname" /> - <attribute name="inputfile" /> - <attribute name="jsondir" /> - <attribute name="outputdir" /> - <sequential> - <echo message="Generating Avro @{protocolname} code..." /> - <mkdir dir="@{jsondir}" /> - <!-- Generate json schema from genavro IDL --> - <java classname="org.apache.avro.tool.Main" fork="true"> - <classpath refid="cassandra.classpath" /> - <arg value="idl" /> - <arg value="@{inputfile}" /> - <arg value="@{jsondir}/@{protocolname}.avpr" /> - </java> - - <!-- Generate java code from JSON protocol schema --> - <avro-protocol destdir="@{outputdir}/gen-java"> - <fileset file="@{jsondir}/@{protocolname}.avpr" /> - </avro-protocol> - </sequential> - </macrodef> - <!-- Generate thrift code. We have targets to build java because @@ -692,7 +636,7 @@ depends="maven-ant-tasks-retrieve-build,build-project" description="Compile Cassandra classes"/> <target name="codecoverage" depends="cobertura-instrument,test,cobertura-report" description="Create code coverage report"/> - <target depends="init,avro-generate,gen-cli-grammar,gen-cql2-grammar,gen-cql3-grammar" + <target depends="init,gen-cli-grammar,gen-cql2-grammar,gen-cql3-grammar" name="build-project"> <echo message="${ant.project.name}: ${ant.file}"/> <!-- Order matters! --> @@ -710,10 +654,6 @@ <copy todir="${build.classes.main}"> <fileset dir="${build.src.resources}" /> </copy> - <taskdef name="paranamer" classname="com.thoughtworks.paranamer.ant.ParanamerGeneratorTask"> - <classpath refid="cassandra.classpath" /> - </taskdef> - <antcall target="createVersionPropFile"/> </target> @@ -879,7 +819,7 @@ <!-- The sources-jar target makes cassandra-sources.jar output required for publishing to Maven central repository. 
--> - <target name="sources-jar" depends="init,avro-generate" description="Assemble Cassandra Sources JAR file"> + <target name="sources-jar" depends="init" description="Assemble Cassandra Sources JAR file"> <jar jarfile="${build.dir}/${ant.project.name}-thrift-${version}-sources.jar"> <fileset dir="${interface.thrift.dir}/gen-java" defaultexcludes="yes"> <include name="org/apache/**/*.java"/> http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/lib/avro-1.4.0-fixes.jar ---------------------------------------------------------------------- diff --git a/lib/avro-1.4.0-fixes.jar b/lib/avro-1.4.0-fixes.jar deleted file mode 100644 index 6733756..0000000 Binary files a/lib/avro-1.4.0-fixes.jar and /dev/null differ http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/lib/avro-1.4.0-sources-fixes.jar ---------------------------------------------------------------------- diff --git a/lib/avro-1.4.0-sources-fixes.jar b/lib/avro-1.4.0-sources-fixes.jar deleted file mode 100644 index 2b6e2cb..0000000 Binary files a/lib/avro-1.4.0-sources-fixes.jar and /dev/null differ http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/lib/licenses/avro-1.4.0.txt ---------------------------------------------------------------------- diff --git a/lib/licenses/avro-1.4.0.txt b/lib/licenses/avro-1.4.0.txt deleted file mode 100644 index d645695..0000000 --- a/lib/licenses/avro-1.4.0.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/src/avro/internode.genavro ---------------------------------------------------------------------- diff --git a/src/avro/internode.genavro b/src/avro/internode.genavro deleted file mode 100644 index d7ba60f..0000000 --- a/src/avro/internode.genavro +++ /dev/null @@ -1,131 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Cassandra internal/internode communication protocol - * - * NOTE: in order to maintain backwards compatibility you *must* - * only append new fields *and* they must have a default value. - */ -@namespace("org.apache.cassandra") - -protocol InterNode { - @aliases(["org.apache.cassandra.config.avro.IndexType"]) - @namespace("org.apache.cassandra.db.migration.avro") - enum IndexType { - KEYS, - CUSTOM - } - - @aliases(["org.apache.cassandra.config.avro.ColumnDef"]) - @namespace("org.apache.cassandra.db.migration.avro") - record ColumnDef { - bytes name; - string validation_class; - union { IndexType, null } index_type; - union { string, null } index_name; - union { null, map<string> } index_options = null; - } - - @aliases(["org.apache.cassandra.config.avro.CfDef"]) - @namespace("org.apache.cassandra.db.migration.avro") - record CfDef { - string keyspace; - string name; - union { string, null } column_type; - union { string, null } comparator_type; - union { string, null } subcomparator_type; - union { string, null } comment; - union { double, null } read_repair_chance; - boolean replicate_on_write = false; - union { int, null } gc_grace_seconds; - union { null, string } default_validation_class = null; - union { null, string } key_validation_class = null; - union { null, int } min_compaction_threshold = null; - union { null, int } max_compaction_threshold = null; - union { null, double} merge_shards_chance = null; - union { int, null } id; - union { array<ColumnDef>, null } column_metadata; - union { null, bytes } key_alias = null; - union { null, string } compaction_strategy = null; - union { null, map<string> } compaction_strategy_options = null; - union { null, map<string> } compression_options = null; - union { null, double } bloom_filter_fp_chance = null; - union { null, string } caching = null; - union { null, array<bytes> } column_aliases = null; - union { null, bytes } value_alias = null; - union { double, null } dclocal_read_repair_chance = 0.0; - union { double, null } row_cache_size; - union { double, null } key_cache_size; - } - - @aliases(["org.apache.cassandra.config.avro.KsDef"]) - @namespace("org.apache.cassandra.db.migration.avro") - record KsDef { - string name; - string strategy_class; - union{ map<string>, null } strategy_options; - union{ int, null } replication_factor; - array<CfDef> cf_defs; - boolean durable_writes = true; - } - - @namespace("org.apache.cassandra.utils.avro") - fixed UUID(16); - - @namespace("org.apache.cassandra.db.migration.avro") - record AddColumnFamily { - CfDef cf; - } - - @namespace("org.apache.cassandra.db.migration.avro") - record AddKeyspace { - KsDef ks; - } - - @namespace("org.apache.cassandra.db.migration.avro") - record DropColumnFamily { - string ksname; - string cfname; - } - - @namespace("org.apache.cassandra.db.migration.avro") - record DropKeyspace { - string ksname; - } - - @namespace("org.apache.cassandra.db.migration.avro") - record UpdateKeyspace { - KsDef oldKs; - KsDef newKs; - } - - @namespace("org.apache.cassandra.db.migration.avro") - record UpdateColumnFamily { - CfDef metadata; - } - - 
@namespace("org.apache.cassandra.db.migration.avro") - record Migration { - org.apache.cassandra.utils.avro.UUID old_version; - org.apache.cassandra.utils.avro.UUID new_version; - bytes row_mutation; - string classname; - union { AddColumnFamily,DropColumnFamily,AddKeyspace,DropKeyspace,UpdateKeyspace,UpdateColumnFamily } migration; - } -} http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/src/java/org/apache/cassandra/config/Avro.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/config/Avro.java b/src/java/org/apache/cassandra/config/Avro.java deleted file mode 100644 index c712e07..0000000 --- a/src/java/org/apache/cassandra/config/Avro.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.cassandra.config; - -import java.nio.ByteBuffer; -import java.util.*; - -import org.apache.cassandra.db.ColumnFamilyType; -import org.apache.cassandra.db.marshal.AbstractType; -import org.apache.cassandra.db.marshal.BytesType; -import org.apache.cassandra.db.marshal.CompositeType; -import org.apache.cassandra.db.marshal.TypeParser; -import org.apache.cassandra.db.migration.avro.CfDef; -import org.apache.cassandra.exceptions.ConfigurationException; -import org.apache.cassandra.exceptions.RequestValidationException; -import org.apache.cassandra.io.compress.CompressionParameters; -import org.apache.cassandra.locator.AbstractReplicationStrategy; -import org.apache.cassandra.locator.NetworkTopologyStrategy; -import org.apache.cassandra.thrift.IndexType; -import org.apache.cassandra.utils.ByteBufferUtil; - -import static org.apache.cassandra.config.CFMetaData.Caching; - -/** - * methods to load schema definitions from old-style Avro serialization - */ -public class Avro -{ - @Deprecated - public static KSMetaData ksFromAvro(org.apache.cassandra.db.migration.avro.KsDef ks) - { - Class<? extends AbstractReplicationStrategy> repStratClass; - try - { - String strategyClassName = KSMetaData.convertOldStrategyName(ks.strategy_class.toString()); - repStratClass = (Class<AbstractReplicationStrategy>)Class.forName(strategyClassName); - } - catch (Exception ex) - { - throw new RuntimeException("Could not create ReplicationStrategy of type " + ks.strategy_class, ex); - } - - Map<String, String> strategyOptions = new HashMap<String, String>(); - if (ks.strategy_options != null) - { - for (Map.Entry<CharSequence, CharSequence> e : ks.strategy_options.entrySet()) - { - String name = e.getKey().toString(); - // Silently discard a replication_factor option to NTS. - // The history is, people were creating CFs with the default settings (which in the CLI is NTS) and then - // giving it a replication_factor option, which is nonsensical. 
Initially our strategy was to silently - // ignore this option, but that turned out to confuse people more. So in 0.8.2 we switched to throwing - // an exception in the NTS constructor, which would be turned into an InvalidRequestException for the - // client. But, it also prevented startup for anyone upgrading without first cleaning that option out. - if (repStratClass == NetworkTopologyStrategy.class && name.trim().toLowerCase().equals("replication_factor")) - continue; - strategyOptions.put(name, e.getValue().toString()); - } - } - maybeAddReplicationFactor(strategyOptions, ks.strategy_class.toString(), ks.replication_factor); - - int cfsz = ks.cf_defs.size(); - List<CFMetaData> cfMetaData = new ArrayList<CFMetaData>(cfsz); - - for (CfDef cf_def : ks.cf_defs) - { - double keysCached = cf_def.key_cache_size == null ? -1 : cf_def.key_cache_size; - double rowsCached = cf_def.row_cache_size == null ? -1 : cf_def.row_cache_size; - - if (keysCached > 0 && rowsCached > 0) - cf_def.caching = Caching.ALL.name(); - else if (keysCached <= 0 && rowsCached <= 0) - cf_def.caching = Caching.NONE.name(); - else if (keysCached > 0 && rowsCached <= 0) - cf_def.caching = Caching.KEYS_ONLY.name(); - else - cf_def.caching = Caching.ROWS_ONLY.name(); - - cfMetaData.add(cfFromAvro(cf_def)); - } - - return new KSMetaData(ks.name.toString(), repStratClass, strategyOptions, ks.durable_writes, cfMetaData); - } - - @Deprecated - private static void maybeAddReplicationFactor(Map<String, String> options, String cls, Integer rf) - { - if (rf != null && (cls.endsWith("SimpleStrategy") || cls.endsWith("OldNetworkTopologyStrategy"))) - options.put("replication_factor", rf.toString()); - } - - @Deprecated - public static CFMetaData cfFromAvro(CfDef cf) - { - AbstractType<?> comparator; - AbstractType<?> subcolumnComparator = null; - AbstractType<?> validator; - AbstractType<?> keyValidator; - - try - { - comparator = TypeParser.parse(cf.comparator_type.toString()); - if (cf.subcomparator_type != null) - subcolumnComparator = TypeParser.parse(cf.subcomparator_type); - validator = TypeParser.parse(cf.default_validation_class); - keyValidator = TypeParser.parse(cf.key_validation_class); - } - catch (Exception ex) - { - throw new RuntimeException("Could not inflate CFMetaData for " + cf, ex); - } - Map<ByteBuffer, ColumnDefinition> column_metadata = new TreeMap<ByteBuffer, ColumnDefinition>(BytesType.instance); - for (org.apache.cassandra.db.migration.avro.ColumnDef aColumn_metadata : cf.column_metadata) - { - ColumnDefinition cd = columnFromAvro(aColumn_metadata); - if (cd.getIndexType() != null && cd.getIndexName() == null) - cd.setIndexName(CFMetaData.getDefaultIndexName(cf.name.toString(), comparator, cd.name)); - column_metadata.put(cd.name, cd); - } - - CFMetaData newCFMD = new CFMetaData(cf.keyspace.toString(), - cf.name.toString(), - ColumnFamilyType.create(cf.column_type.toString()), - comparator, - subcolumnComparator); - - // When we pull up an old avro CfDef which doesn't have these arguments, - // it doesn't default them correctly. Without explicit defaulting, - // grandfathered metadata becomes wrong or causes crashes. - // Isn't AVRO supposed to handle stuff like this? 
- if (cf.min_compaction_threshold != null) { newCFMD.minCompactionThreshold(cf.min_compaction_threshold); } - if (cf.max_compaction_threshold != null) { newCFMD.maxCompactionThreshold(cf.max_compaction_threshold); } - - if (cf.key_alias != null) - newCFMD.addOrReplaceColumnDefinition(ColumnDefinition.partitionKeyDef(cf.key_alias, keyValidator, null)); - if (cf.column_aliases != null) - { - if (comparator instanceof CompositeType) - { - List<AbstractType<?>> components = ((CompositeType)comparator).types; - for (int i = 0; i < cf.column_aliases.size(); ++i) - if (cf.column_aliases.get(i) != null) - newCFMD.addOrReplaceColumnDefinition(ColumnDefinition.clusteringKeyDef(cf.column_aliases.get(i), components.get(i), i)); - } - else - { - assert cf.column_aliases.size() <= 1; - if (cf.column_aliases.get(0) != null) - newCFMD.addOrReplaceColumnDefinition(ColumnDefinition.clusteringKeyDef(cf.column_aliases.get(0), comparator, null)); - } - } - if (cf.value_alias != null) - newCFMD.addOrReplaceColumnDefinition(ColumnDefinition.compactValueDef(cf.value_alias, validator)); - - if (cf.compaction_strategy != null) - { - try - { - newCFMD.compactionStrategyClass = CFMetaData.createCompactionStrategy(cf.compaction_strategy.toString()); - } - catch (ConfigurationException e) - { - throw new RuntimeException(e); - } - } - if (cf.compaction_strategy_options != null) - { - for (Map.Entry<CharSequence, CharSequence> e : cf.compaction_strategy_options.entrySet()) - newCFMD.compactionStrategyOptions.put(e.getKey().toString(), e.getValue().toString()); - } - - CompressionParameters cp; - try - { - cp = CompressionParameters.create(cf.compression_options); - } - catch (ConfigurationException e) - { - throw new RuntimeException(e); - } - - CFMetaData.Caching caching; - - try - { - caching = cf.caching == null ? CFMetaData.DEFAULT_CACHING_STRATEGY : CFMetaData.Caching.fromString(cf.caching.toString()); - } - catch (ConfigurationException e) - { - throw new RuntimeException(e); - } - - return newCFMD.comment(cf.comment.toString()) - .readRepairChance(cf.read_repair_chance) - .dcLocalReadRepairChance(cf.dclocal_read_repair_chance) - .replicateOnWrite(cf.replicate_on_write) - .gcGraceSeconds(cf.gc_grace_seconds) - .defaultValidator(validator) - .keyValidator(keyValidator) - .columnMetadata(column_metadata) - .compressionParameters(cp) - .bloomFilterFpChance(cf.bloom_filter_fp_chance) - .caching(caching); - } - - @Deprecated - public static ColumnDefinition columnFromAvro(org.apache.cassandra.db.migration.avro.ColumnDef cd) - { - IndexType index_type = cd.index_type == null ? null : Enum.valueOf(IndexType.class, cd.index_type.name()); - String index_name = cd.index_name == null ? 
null : cd.index_name.toString(); - try - { - AbstractType<?> validatorType = TypeParser.parse(cd.validation_class); - ColumnDefinition def = ColumnDefinition.regularDef(ByteBufferUtil.clone(cd.name), validatorType, null); - def.setIndexName(index_name); - def.setIndexType(index_type, getStringMap(cd.index_options)); - return def; - } - catch (RequestValidationException e) - { - throw new RuntimeException(e); - } - } - - public static Map<String,String> getStringMap(Map<CharSequence, CharSequence> charMap) - { - if (charMap == null) - return null; - - Map<String,String> stringMap = new HashMap<String, String>(); - - for (Map.Entry<CharSequence, CharSequence> entry : charMap.entrySet()) - stringMap.put(entry.getKey().toString(), entry.getValue().toString()); - - - return stringMap; - } -} http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/src/java/org/apache/cassandra/db/DefsTable.java ---------------------------------------------------------------------- diff --git a/src/java/org/apache/cassandra/db/DefsTable.java b/src/java/org/apache/cassandra/db/DefsTable.java index 482f2f9..df551e0 100644 --- a/src/java/org/apache/cassandra/db/DefsTable.java +++ b/src/java/org/apache/cassandra/db/DefsTable.java @@ -21,23 +21,15 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.collect.Iterables; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; -import org.apache.avro.io.BinaryDecoder; -import org.apache.avro.io.DecoderFactory; -import org.apache.avro.specific.SpecificDatumReader; -import org.apache.avro.specific.SpecificRecord; import org.apache.cassandra.config.*; import org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.db.filter.QueryFilter; import org.apache.cassandra.db.marshal.AsciiType; import org.apache.cassandra.db.marshal.UTF8Type; -import org.apache.cassandra.db.migration.avro.KsDef; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.service.MigrationManager; @@ -113,18 +105,6 @@ import org.apache.cassandra.utils.FBUtilities; */ public class DefsTable { - private final static Logger logger = LoggerFactory.getLogger(DefsTable.class); - - // unbuffered decoders - private final static DecoderFactory DIRECT_DECODERS = new DecoderFactory().configureDirectDecoder(true); - - // column name for the schema storing serialized keyspace definitions - // NB: must be an invalid keyspace name - public static final ByteBuffer DEFINITION_SCHEMA_COLUMN_NAME = ByteBufferUtil.bytes("Avro/Schema"); - - public static final String OLD_MIGRATIONS_CF = "Migrations"; - public static final String OLD_SCHEMA_CF = "Schema"; - /* saves keyspace definitions to system schema columnfamilies */ public static synchronized void save(Collection<KSMetaData> keyspaces) { @@ -176,55 +156,6 @@ public class DefsTable } /** - * Loads a version of keyspace definitions from storage (using old SCHEMA_CF as a data source) - * Note: If definitions where found in SCHEMA_CF this method would load them into new schema handling table KEYSPACE_CF - * - * @param version The version of the latest migration. 
- * - * @return Collection of found keyspace definitions - * - * @throws IOException if failed to read SCHEMA_CF or failed to deserialize Avro schema - */ - public static synchronized Collection<KSMetaData> loadFromStorage(UUID version) throws IOException - { - DecoratedKey vkey = StorageService.getPartitioner().decorateKey(toUTF8Bytes(version)); - Table defs = Table.open(Table.SYSTEM_KS); - ColumnFamilyStore cfStore = defs.getColumnFamilyStore(OLD_SCHEMA_CF); - ColumnFamily cf = cfStore.getColumnFamily(QueryFilter.getIdentityFilter(vkey, OLD_SCHEMA_CF)); - Column avroschema = cf.getColumn(DEFINITION_SCHEMA_COLUMN_NAME); - - Collection<KSMetaData> keyspaces = Collections.emptyList(); - - if (avroschema != null) - { - ByteBuffer value = avroschema.value(); - org.apache.avro.Schema schema = org.apache.avro.Schema.parse(ByteBufferUtil.string(value)); - - // deserialize keyspaces using schema - keyspaces = new ArrayList<KSMetaData>(Iterables.size(cf)); - - for (Column column : cf) - { - if (column.name().equals(DEFINITION_SCHEMA_COLUMN_NAME)) - continue; - KsDef ks = deserializeAvro(schema, column.value(), new KsDef()); - keyspaces.add(Avro.ksFromAvro(ks)); - } - - // store deserialized keyspaces into new place - save(keyspaces); - - flushSchemaCFs(); - - logger.info("Truncating deprecated system column families (migrations, schema)..."); - dropColumnFamily(Table.SYSTEM_KS, OLD_MIGRATIONS_CF); - dropColumnFamily(Table.SYSTEM_KS, OLD_SCHEMA_CF); - } - - return keyspaces; - } - - /** * Merge remote schema in form of row mutations with local and mutate ks/cf metadata objects * (which also involves fs operations on add/drop ks/cf) * @@ -528,28 +459,4 @@ public class DefsTable { FBUtilities.waitOnFuture(SystemTable.schemaCFS(cfName).forceFlush()); } - - private static ByteBuffer toUTF8Bytes(UUID version) - { - return ByteBufferUtil.bytes(version.toString()); - } - - /** - * Deserialize a single object based on the given Schema. - * - * @param writer writer's schema - * @param bytes Array to deserialize from - * @param ob An empty object to deserialize into (must not be null). 
- * - * @return serialized Avro object - * - * @throws IOException if deserialization failed - */ - public static <T extends SpecificRecord> T deserializeAvro(org.apache.avro.Schema writer, ByteBuffer bytes, T ob) throws IOException - { - BinaryDecoder dec = DIRECT_DECODERS.createBinaryDecoder(ByteBufferUtil.getArray(bytes), null); - SpecificDatumReader<T> reader = new SpecificDatumReader<T>(writer); - reader.setExpected(ob.getSchema()); - return reader.read(ob, dec); - } } http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/test/unit/org/apache/cassandra/config/CFMetaDataTest.java ---------------------------------------------------------------------- diff --git a/test/unit/org/apache/cassandra/config/CFMetaDataTest.java b/test/unit/org/apache/cassandra/config/CFMetaDataTest.java index 1621776..3627cd8 100644 --- a/test/unit/org/apache/cassandra/config/CFMetaDataTest.java +++ b/test/unit/org/apache/cassandra/config/CFMetaDataTest.java @@ -64,7 +64,7 @@ public class CFMetaDataTest extends SchemaLoader } @Test - public void testThriftToAvroConversion() throws Exception + public void testThriftConversion() throws Exception { CfDef cfDef = new CfDef().setDefault_validation_class(AsciiType.class.getCanonicalName()) .setComment("Test comment") @@ -75,7 +75,6 @@ public class CFMetaDataTest extends SchemaLoader // convert Thrift to CFMetaData CFMetaData cfMetaData = CFMetaData.fromThrift(cfDef); - // make a correct Avro object CfDef thriftCfDef = new CfDef(); thriftCfDef.keyspace = KEYSPACE; thriftCfDef.name = COLUMN_FAMILY; http://git-wip-us.apache.org/repos/asf/cassandra/blob/6f5d8a56/test/unit/org/apache/cassandra/config/DatabaseDescriptorTest.java ---------------------------------------------------------------------- diff --git a/test/unit/org/apache/cassandra/config/DatabaseDescriptorTest.java b/test/unit/org/apache/cassandra/config/DatabaseDescriptorTest.java index 4433b57..ee14ab5 100644 --- a/test/unit/org/apache/cassandra/config/DatabaseDescriptorTest.java +++ b/test/unit/org/apache/cassandra/config/DatabaseDescriptorTest.java @@ -58,7 +58,6 @@ public class DatabaseDescriptorTest for (KSMetaData ksm : Schema.instance.getTableDefinitions()) { // Not testing round-trip on the KsDef via serDe() because maps - // cannot be compared in avro. KSMetaData ksmDupe = KSMetaData.fromThrift(ksm.toThrift()); assert ksmDupe != null; assert ksmDupe.equals(ksm);
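
For context on what remains after this removal: schema definitions now round-trip through Thrift only, as exercised by the updated CFMetaDataTest and DatabaseDescriptorTest hunks above. Below is a minimal sketch of that round trip, assuming the CFMetaData.fromThrift / KSMetaData.fromThrift / toThrift calls shown in those test diffs; the class and method names (ThriftSchemaRoundTrip, verifyKeyspaceDefinitions, toCFMetaData) are illustrative only, not part of the commit.

    import org.apache.cassandra.config.CFMetaData;
    import org.apache.cassandra.config.KSMetaData;
    import org.apache.cassandra.config.Schema;
    import org.apache.cassandra.thrift.CfDef;

    public class ThriftSchemaRoundTrip
    {
        // Mirrors DatabaseDescriptorTest: every loaded keyspace definition should
        // survive a KSMetaData -> thrift KsDef -> KSMetaData round trip unchanged.
        static void verifyKeyspaceDefinitions() throws Exception
        {
            for (KSMetaData ksm : Schema.instance.getTableDefinitions())
            {
                KSMetaData ksmDupe = KSMetaData.fromThrift(ksm.toThrift());
                assert ksmDupe != null;
                assert ksmDupe.equals(ksm);
            }
        }

        // Mirrors CFMetaDataTest: a thrift CfDef converts straight to CFMetaData,
        // with no intermediate Avro representation left in the code base.
        static CFMetaData toCFMetaData(CfDef cfDef) throws Exception
        {
            return CFMetaData.fromThrift(cfDef);
        }
    }

Note that the loadFromStorage/deserializeAvro path deleted from DefsTable above was the reader for old Avro-serialized schema rows, so that legacy on-disk schema format can no longer be loaded once this commit is in place.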