Commit 9a03da77 authored by Yueh-Hsuan Chiang

Merge pull request #375 from fyrz/RocksJava-ColumnFamilyOptions-Extension-3.6

[RocksJava] ColumnFamilyOptions extension 3.6
......@@ -5,6 +5,7 @@ NATIVE_JAVA_CLASSES = org.rocksdb.AbstractComparator\
org.rocksdb.BlockBasedTableConfig\
org.rocksdb.BloomFilter\
org.rocksdb.ColumnFamilyHandle\
org.rocksdb.ColumnFamilyOptions\
org.rocksdb.Comparator\
org.rocksdb.ComparatorOptions\
org.rocksdb.DBOptions\
......@@ -69,6 +70,12 @@ sample: java
@rm -rf /tmp/rocksdbjni
@rm -rf /tmp/rocksdbjni_not_found
column_family_sample: java
javac -cp $(ROCKSDB_JAR) RocksDBColumnFamilySample.java
@rm -rf /tmp/rocksdbjni
java -ea -Djava.library.path=.:../ -cp ".:./*" -Xcheck:jni RocksDBColumnFamilySample /tmp/rocksdbjni
@rm -rf /tmp/rocksdbjni
test: java
@rm -rf /tmp/rocksdbjni_*
javac org/rocksdb/test/*.java
......@@ -77,14 +84,16 @@ test: java
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.BlockBasedTableConfigTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.DBOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ColumnFamilyTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ColumnFamilyOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.FilterTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.FlushTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.KeyMayExistTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MemTableTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MergeTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MixedOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.OptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.PlainTableConfigTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ReadOnlyTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ReadOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.RocksIteratorTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.SnapshotTest
......
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
import org.rocksdb.*;
import java.util.ArrayList;
import java.util.List;
public class RocksDBColumnFamilySample {
static {
RocksDB.loadLibrary();
}
public static void main(String[] args) throws RocksDBException {
if (args.length < 1) {
System.out.println(
"usage: RocksDBColumnFamilySample db_path");
return;
}
String db_path = args[0];
System.out.println("RocksDBColumnFamilySample");
RocksDB db = null;
Options options = null;
ColumnFamilyHandle columnFamilyHandle = null;
WriteBatch wb = null;
try {
options = new Options().setCreateIfMissing(true);
db = RocksDB.open(options, db_path);
assert(db != null);
// create column family
columnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf", new ColumnFamilyOptions()));
assert(columnFamilyHandle != null);
} finally {
if (columnFamilyHandle != null) {
columnFamilyHandle.dispose();
}
if (db != null) {
db.close();
db = null;
}
}
// open DB with two column families
List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
// have to open default column family
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(
RocksDB.DEFAULT_COLUMN_FAMILY, new ColumnFamilyOptions()));
// open the new one, too
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(
"new_cf", new ColumnFamilyOptions()));
List<ColumnFamilyHandle> columnFamilyHandles = new ArrayList<>();
try {
db = RocksDB.open(new DBOptions(), db_path,
columnFamilyDescriptors, columnFamilyHandles);
assert(db != null);
// put and get using an explicit column family handle (here the default column family)
db.put(columnFamilyHandles.get(0), new WriteOptions(),
"key".getBytes(), "value".getBytes());
String value = new String(db.get(columnFamilyHandles.get(0),
"key".getBytes()));
// atomic write
wb = new WriteBatch();
wb.put(columnFamilyHandles.get(0), "key2".getBytes(), "value2".getBytes());
wb.put(columnFamilyHandles.get(1), "key3".getBytes(), "value3".getBytes());
wb.remove(columnFamilyHandles.get(0), "key".getBytes());
db.write(new WriteOptions(), wb);
// drop column family
db.dropColumnFamily(columnFamilyHandles.get(1));
} finally {
for (ColumnFamilyHandle handle : columnFamilyHandles){
handle.dispose();
}
if (db != null) {
db.close();
}
if (wb != null) {
wb.dispose();
}
}
}
}
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb;
/**
* <p>Describes a column family with a
* name and respective Options.</p>
*/
public class ColumnFamilyDescriptor {
/**
* <p>Creates a new Column Family using a name and default
* options.</p>
*
* @param columnFamilyName name of column family.
*/
public ColumnFamilyDescriptor(final String columnFamilyName){
this(columnFamilyName, new ColumnFamilyOptions());
}
/**
* <p>Creates a new Column Family using a name and custom
* options.</p>
*
* @param columnFamilyName name of column family.
* @param columnFamilyOptions options to be used with
* column family.
*/
public ColumnFamilyDescriptor(final String columnFamilyName,
final ColumnFamilyOptions columnFamilyOptions) {
columnFamilyName_ = columnFamilyName;
columnFamilyOptions_ = columnFamilyOptions;
}
/**
* Retrieve name of column family.
*
* @return column family name.
*/
public String columnFamilyName() {
return columnFamilyName_;
}
/**
* Retrieve the assigned ColumnFamilyOptions instance.
*
* @return the ColumnFamilyOptions instance assigned to this descriptor.
*/
public ColumnFamilyOptions columnFamilyOptions() {
return columnFamilyOptions_;
}
private final String columnFamilyName_;
private final ColumnFamilyOptions columnFamilyOptions_;
}
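For illustration, a minimal sketch of how the two constructors above might be used. The column family name "users" and the write-buffer tuning are arbitrary example values, not part of this change:

import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyOptions;
import org.rocksdb.RocksDB;

public class ColumnFamilyDescriptorSketch {
  static {
    RocksDB.loadLibrary();
  }

  public static void main(String[] args) {
    // name-only constructor: a default ColumnFamilyOptions is created internally
    ColumnFamilyDescriptor defaultCf =
        new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY);

    // name plus explicitly tuned options
    ColumnFamilyOptions cfOptions = new ColumnFamilyOptions();
    cfOptions.setMaxWriteBufferNumber(4); // arbitrary example value
    ColumnFamilyDescriptor tunedCf =
        new ColumnFamilyDescriptor("users", cfOptions);

    System.out.println(defaultCf.columnFamilyName());                    // "default"
    System.out.println(tunedCf.columnFamilyOptions().maxWriteBufferNumber()); // 4
  }
}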
This diff has been collapsed.
......@@ -29,6 +29,20 @@ public class Options extends RocksObject
env_ = RocksEnv.getDefault();
}
/**
* Construct options for opening a RocksDB. Reusing database options
* and column family options.
*
* @param dbOptions {@link org.rocksdb.DBOptions} instance
* @param columnFamilyOptions {@link org.rocksdb.ColumnFamilyOptions}
* instance
*/
public Options(DBOptions dbOptions, ColumnFamilyOptions columnFamilyOptions) {
super();
newOptions(dbOptions.nativeHandle_, columnFamilyOptions.nativeHandle_);
env_ = RocksEnv.getDefault();
}
@Override
public Options setCreateIfMissing(boolean flag) {
assert(isInitialized());
......
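For context, a minimal sketch of the constructor added above, composing an Options instance from separate DBOptions and ColumnFamilyOptions; the same pattern is exercised by MixedOptionsTest further down:

import org.rocksdb.*;

public class ComposedOptionsSketch {
  static {
    RocksDB.loadLibrary();
  }

  public static void main(String[] args) {
    DBOptions dbOptions = new DBOptions();
    dbOptions.setCreateIfMissing(true);

    ColumnFamilyOptions cfOptions = new ColumnFamilyOptions();
    cfOptions.setTableFormatConfig(new PlainTableConfig());

    // the composed Options reflects the column-family-side setting
    Options options = new Options(dbOptions, cfOptions);
    assert(options.tableFactoryName().equals("PlainTable"));

    // dispose in reverse order of creation
    options.dispose();
    cfOptions.dispose();
    dbOptions.dispose();
  }
}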
......@@ -16,6 +16,7 @@ import org.rocksdb.util.Environment;
* indicates something went wrong on the RocksDB library side and the call failed.
*/
public class RocksDB extends RocksObject {
public static final String DEFAULT_COLUMN_FAMILY = "default";
public static final int NOT_FOUND = -1;
private static final String[] compressionLibs_ = {
"snappy", "z", "bzip2", "lz4", "lz4hc"};
......@@ -123,21 +124,22 @@ public class RocksDB extends RocksObject {
* </p>
*
* @param path the path to the rocksdb.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
*
* @throws org.rocksdb.RocksDBException
* @see Options#setCreateIfMissing(boolean)
* @see DBOptions#setCreateIfMissing(boolean)
*/
public static RocksDB open(String path, List<String> columnFamilyNames,
public static RocksDB open(String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles) throws RocksDBException {
// This allows the rocksjni default DBOptions to be used instead of
// the C++ one.
Options options = new Options();
return open(options, path, columnFamilyNames, columnFamilyHandles);
DBOptions options = new DBOptions();
return open(options, path, columnFamilyDescriptors, columnFamilyHandles);
}
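A minimal sketch of the descriptor-based open added here. It assumes the database at the illustrative path /tmp/rocksdbjni already exists and already contains a column family named "new_cf" (for example after running RocksDBColumnFamilySample above), since the default DBOptions used by this overload does not create missing column families:

import java.util.ArrayList;
import java.util.List;
import org.rocksdb.*;

public class OpenWithDescriptorsSketch {
  static {
    RocksDB.loadLibrary();
  }

  public static void main(String[] args) throws RocksDBException {
    List<ColumnFamilyDescriptor> descriptors = new ArrayList<>();
    // the default column family must always be part of the list
    descriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
    descriptors.add(new ColumnFamilyDescriptor("new_cf"));

    List<ColumnFamilyHandle> handles = new ArrayList<>();
    RocksDB db = RocksDB.open("/tmp/rocksdbjni", descriptors, handles);
    try {
      // handles is filled in the same order as the descriptor list
      db.put(handles.get(1), "key".getBytes(), "value".getBytes());
      System.out.println(new String(db.get(handles.get(1), "key".getBytes())));
    } finally {
      for (ColumnFamilyHandle handle : handles) {
        handle.dispose();
      }
      db.close();
    }
  }
}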
/**
......@@ -196,24 +198,25 @@ public class RocksDB extends RocksObject {
* <p>
* ColumnFamily handles are disposed when the RocksDB instance is disposed.</p>
*
* @param options {@link org.rocksdb.Options} instance.
* @param options {@link org.rocksdb.DBOptions} instance.
* @param path the path to the rocksdb.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
*
* @throws org.rocksdb.RocksDBException
* @see Options#setCreateIfMissing(boolean)
* @see DBOptions#setCreateIfMissing(boolean)
*/
public static RocksDB open(Options options, String path, List<String> columnFamilyNames,
public static RocksDB open(DBOptions options, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles)
throws RocksDBException {
RocksDB db = new RocksDB();
List<Long> cfReferences = db.open(options.nativeHandle_, path,
columnFamilyNames, columnFamilyNames.size());
for (int i=0; i<columnFamilyNames.size(); i++) {
columnFamilyDescriptors, columnFamilyDescriptors.size());
for (int i = 0; i < columnFamilyDescriptors.size(); i++) {
columnFamilyHandles.add(new ColumnFamilyHandle(db, cfReferences.get(i)));
}
db.storeOptionsInstance(options);
......@@ -244,19 +247,21 @@ public class RocksDB extends RocksObject {
* options.
*
* @param path the path to the RocksDB.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
* @throws RocksDBException
*/
public static RocksDB openReadOnly(String path, List<String> columnFamilyNames,
public static RocksDB openReadOnly(String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles) throws RocksDBException {
// This allows the rocksjni default DBOptions to be used instead of
// the C++ one.
Options options = new Options();
return openReadOnly(options, path, columnFamilyNames, columnFamilyHandles);
DBOptions options = new DBOptions();
return openReadOnly(options, path, columnFamilyDescriptors,
columnFamilyHandles);
}
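A corresponding read-only sketch; it again assumes the database at the illustrative path and the listed column families already exist:

import java.util.ArrayList;
import java.util.List;
import org.rocksdb.*;

public class OpenReadOnlySketch {
  static {
    RocksDB.loadLibrary();
  }

  public static void main(String[] args) throws RocksDBException {
    List<ColumnFamilyDescriptor> descriptors = new ArrayList<>();
    descriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
    descriptors.add(new ColumnFamilyDescriptor("new_cf"));

    List<ColumnFamilyHandle> handles = new ArrayList<>();
    RocksDB db = RocksDB.openReadOnly("/tmp/rocksdbjni", descriptors, handles);
    try {
      byte[] value = db.get(handles.get(1), "key".getBytes());
      System.out.println(value == null ? "not found" : new String(value));
      // any write against a read-only instance throws RocksDBException
    } finally {
      for (ColumnFamilyHandle handle : handles) {
        handle.dispose();
      }
      db.close();
    }
  }
}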
/**
......@@ -297,25 +302,26 @@ public class RocksDB extends RocksObject {
* options instance have been closed. If the user doesn't dispose the options
* explicitly, then this options instance will be GC'd automatically.</p>
*
* @param options {@link Options} instance.
* @param options {@link DBOptions} instance.
* @param path the path to the RocksDB.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
* @throws RocksDBException
*/
public static RocksDB openReadOnly(Options options, String path,
List<String> columnFamilyNames, List<ColumnFamilyHandle> columnFamilyHandles)
public static RocksDB openReadOnly(DBOptions options, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles)
throws RocksDBException {
// When a non-default options instance is used, keeping a reference to it
// in RocksDB prevents Java from garbage-collecting it during the lifetime of
// the currently-created RocksDB.
RocksDB db = new RocksDB();
List<Long> cfReferences = db.openROnly(options.nativeHandle_, path,
columnFamilyNames, columnFamilyNames.size());
for (int i=0; i<columnFamilyNames.size(); i++) {
columnFamilyDescriptors, columnFamilyDescriptors.size());
for (int i=0; i<columnFamilyDescriptors.size(); i++) {
columnFamilyHandles.add(new ColumnFamilyHandle(db, cfReferences.get(i)));
}
......@@ -337,7 +343,7 @@ public class RocksDB extends RocksObject {
return RocksDB.listColumnFamilies(options.nativeHandle_, path);
}
private void storeOptionsInstance(Options options) {
private void storeOptionsInstance(DBOptionsInterface options) {
options_ = options;
}
......@@ -1059,14 +1065,15 @@ public class RocksDB extends RocksObject {
* allocates a ColumnFamilyHandle within an internal structure.
* The ColumnFamilyHandle is automatically disposed with DB disposal.
*
* @param columnFamilyName Name of column family to be created.
* @param columnFamilyDescriptor column family to be created.
* @return {@link org.rocksdb.ColumnFamilyHandle} instance
* @see RocksDBException
*/
public ColumnFamilyHandle createColumnFamily(String columnFamilyName)
public ColumnFamilyHandle createColumnFamily(
ColumnFamilyDescriptor columnFamilyDescriptor)
throws RocksDBException {
return new ColumnFamilyHandle(this, createColumnFamily(nativeHandle_,
options_.nativeHandle_, columnFamilyName));
columnFamilyDescriptor));
}
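A minimal sketch of the new descriptor-based createColumnFamily; the path and the merge-operator choice are illustrative only (the same call shape appears in ColumnFamilyTest and MergeTest below):

import org.rocksdb.*;

public class CreateColumnFamilySketch {
  static {
    RocksDB.loadLibrary();
  }

  public static void main(String[] args) throws RocksDBException {
    Options options = new Options().setCreateIfMissing(true);
    RocksDB db = RocksDB.open(options, "/tmp/rocksdbjni_create_cf");
    ColumnFamilyHandle handle = null;
    try {
      // column family options now travel inside the descriptor
      handle = db.createColumnFamily(new ColumnFamilyDescriptor("new_cf",
          new ColumnFamilyOptions().setMergeOperatorName("stringappend")));
      db.put(handle, "cfkey".getBytes(), "value".getBytes());
      db.merge(handle, "cfkey".getBytes(), "more".getBytes());
    } finally {
      if (handle != null) {
        handle.dispose();
      }
      db.close();
      options.dispose();
    }
  }
}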
/**
......@@ -1130,15 +1137,17 @@ public class RocksDB extends RocksObject {
protected native void open(
long optionsHandle, String path) throws RocksDBException;
protected native List<Long> open(long optionsHandle, String path,
List<String> columnFamilyNames, int columnFamilyNamesLength)
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
int columnFamilyDescriptorsLength)
throws RocksDBException;
protected native static List<byte[]> listColumnFamilies(
long optionsHandle, String path) throws RocksDBException;
protected native void openROnly(
long optionsHandle, String path) throws RocksDBException;
protected native List<Long> openROnly(
long optionsHandle, String path, List<String> columnFamilyNames,
int columnFamilyNamesLength) throws RocksDBException;
long optionsHandle, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
int columnFamilyDescriptorsLength) throws RocksDBException;
protected native void put(
long handle, byte[] key, int keyLen,
byte[] value, int valueLen) throws RocksDBException;
......@@ -1231,13 +1240,13 @@ public class RocksDB extends RocksObject {
protected native void releaseSnapshot(
long nativeHandle, long snapshotHandle);
private native void disposeInternal(long handle);
private native long createColumnFamily(long handle, long opt_handle,
String name) throws RocksDBException;
private native long createColumnFamily(long handle,
ColumnFamilyDescriptor columnFamilyDescriptor) throws RocksDBException;
private native void dropColumnFamily(long handle, long cfHandle) throws RocksDBException;
private native void flush(long handle, long flushOptHandle)
throws RocksDBException;
private native void flush(long handle, long flushOptHandle,
long cfHandle) throws RocksDBException;
protected Options options_;
protected DBOptionsInterface options_;
}
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb.test;
import org.rocksdb.*;
import java.util.Random;
public class ColumnFamilyOptionsTest {
static {
RocksDB.loadLibrary();
}
public static void testCFOptions(ColumnFamilyOptionsInterface opt) {
Random rand = PlatformRandomHelper.
getPlatformSpecificRandomFactory();
{ // WriteBufferSize test
try {
long longValue = rand.nextLong();
opt.setWriteBufferSize(longValue);
assert(opt.writeBufferSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MaxWriteBufferNumber test
int intValue = rand.nextInt();
opt.setMaxWriteBufferNumber(intValue);
assert(opt.maxWriteBufferNumber() == intValue);
}
{ // MinWriteBufferNumberToMerge test
int intValue = rand.nextInt();
opt.setMinWriteBufferNumberToMerge(intValue);
assert(opt.minWriteBufferNumberToMerge() == intValue);
}
{ // NumLevels test
int intValue = rand.nextInt();
opt.setNumLevels(intValue);
assert(opt.numLevels() == intValue);
}
{ // LevelFileNumCompactionTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroFileNumCompactionTrigger(intValue);
assert(opt.levelZeroFileNumCompactionTrigger() == intValue);
}
{ // LevelSlowdownWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroSlowdownWritesTrigger(intValue);
assert(opt.levelZeroSlowdownWritesTrigger() == intValue);
}
{ // LevelStopWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroStopWritesTrigger(intValue);
assert(opt.levelZeroStopWritesTrigger() == intValue);
}
{ // MaxMemCompactionLevel test
int intValue = rand.nextInt();
opt.setMaxMemCompactionLevel(intValue);
assert(opt.maxMemCompactionLevel() == intValue);
}
{ // TargetFileSizeBase test
long longValue = rand.nextLong();
opt.setTargetFileSizeBase(longValue);
assert(opt.targetFileSizeBase() == longValue);
}
{ // TargetFileSizeMultiplier test
int intValue = rand.nextInt();
opt.setTargetFileSizeMultiplier(intValue);
assert(opt.targetFileSizeMultiplier() == intValue);
}
{ // MaxBytesForLevelBase test
long longValue = rand.nextLong();
opt.setMaxBytesForLevelBase(longValue);
assert(opt.maxBytesForLevelBase() == longValue);
}
{ // MaxBytesForLevelMultiplier test
int intValue = rand.nextInt();
opt.setMaxBytesForLevelMultiplier(intValue);
assert(opt.maxBytesForLevelMultiplier() == intValue);
}
{ // ExpandedCompactionFactor test
int intValue = rand.nextInt();
opt.setExpandedCompactionFactor(intValue);
assert(opt.expandedCompactionFactor() == intValue);
}
{ // SourceCompactionFactor test
int intValue = rand.nextInt();
opt.setSourceCompactionFactor(intValue);
assert(opt.sourceCompactionFactor() == intValue);
}
{ // MaxGrandparentOverlapFactor test
int intValue = rand.nextInt();
opt.setMaxGrandparentOverlapFactor(intValue);
assert(opt.maxGrandparentOverlapFactor() == intValue);
}
{ // SoftRateLimit test
double doubleValue = rand.nextDouble();
opt.setSoftRateLimit(doubleValue);
assert(opt.softRateLimit() == doubleValue);
}
{ // HardRateLimit test
double doubleValue = rand.nextDouble();
opt.setHardRateLimit(doubleValue);
assert(opt.hardRateLimit() == doubleValue);
}
{ // RateLimitDelayMaxMilliseconds test
int intValue = rand.nextInt();
opt.setRateLimitDelayMaxMilliseconds(intValue);
assert(opt.rateLimitDelayMaxMilliseconds() == intValue);
}
{ // ArenaBlockSize test
try {
long longValue = rand.nextLong();
opt.setArenaBlockSize(longValue);
assert(opt.arenaBlockSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // DisableAutoCompactions test
boolean boolValue = rand.nextBoolean();
opt.setDisableAutoCompactions(boolValue);
assert(opt.disableAutoCompactions() == boolValue);
}
{ // PurgeRedundantKvsWhileFlush test
boolean boolValue = rand.nextBoolean();
opt.setPurgeRedundantKvsWhileFlush(boolValue);
assert(opt.purgeRedundantKvsWhileFlush() == boolValue);
}
{ // VerifyChecksumsInCompaction test
boolean boolValue = rand.nextBoolean();
opt.setVerifyChecksumsInCompaction(boolValue);
assert(opt.verifyChecksumsInCompaction() == boolValue);
}
{ // FilterDeletes test
boolean boolValue = rand.nextBoolean();
opt.setFilterDeletes(boolValue);
assert(opt.filterDeletes() == boolValue);
}
{ // MaxSequentialSkipInIterations test
long longValue = rand.nextLong();
opt.setMaxSequentialSkipInIterations(longValue);
assert(opt.maxSequentialSkipInIterations() == longValue);
}
{ // InplaceUpdateSupport test
boolean boolValue = rand.nextBoolean();
opt.setInplaceUpdateSupport(boolValue);
assert(opt.inplaceUpdateSupport() == boolValue);
}
{ // InplaceUpdateNumLocks test
try {
long longValue = rand.nextLong();
opt.setInplaceUpdateNumLocks(longValue);
assert(opt.inplaceUpdateNumLocks() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MemtablePrefixBloomBits test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomBits(intValue);
assert(opt.memtablePrefixBloomBits() == intValue);
}
{ // MemtablePrefixBloomProbes test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomProbes(intValue);
assert(opt.memtablePrefixBloomProbes() == intValue);
}
{ // BloomLocality test
int intValue = rand.nextInt();
opt.setBloomLocality(intValue);
assert(opt.bloomLocality() == intValue);
}
{ // MaxSuccessiveMerges test
try {
long longValue = rand.nextLong();
opt.setMaxSuccessiveMerges(longValue);
assert(opt.maxSuccessiveMerges() == longValue);
} catch (RocksDBException e){
assert(false);
}
}
{ // MinPartialMergeOperands test
int intValue = rand.nextInt();
opt.setMinPartialMergeOperands(intValue);
assert(opt.minPartialMergeOperands() == intValue);
}
}
public static void main(String[] args) {
ColumnFamilyOptions opt = new ColumnFamilyOptions();
testCFOptions(opt);
opt.dispose();
System.out.println("Passed DBOptionsTest");
}
}
......@@ -22,6 +22,10 @@ public class ColumnFamilyTest {
RocksDB db = null;
Options options = new Options();
options.setCreateIfMissing(true);
DBOptions dbOptions = new DBOptions();
dbOptions.setCreateIfMissing(true);
try {
db = RocksDB.open(options, db_path);
} catch (RocksDBException e) {
......@@ -43,7 +47,8 @@ public class ColumnFamilyTest {
// Test createColumnFamily
try {
db.createColumnFamily("new_cf");
db.createColumnFamily(new ColumnFamilyDescriptor("new_cf",
new ColumnFamilyOptions()));
} catch (RocksDBException e) {
assert(false);
}
......@@ -67,14 +72,15 @@ public class ColumnFamilyTest {
}
// Test open database with column family names
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
new ArrayList<>();
cfNames.add(new ColumnFamilyDescriptor("default"));
cfNames.add(new ColumnFamilyDescriptor("new_cf"));
try {
db = RocksDB.open(options, db_path, cfNames, columnFamilyHandleList);
db = RocksDB.open(dbOptions, db_path, cfNames, columnFamilyHandleList);
assert(columnFamilyHandleList.size() == 2);
db.put("dfkey1".getBytes(), "dfvalue".getBytes());
db.put(columnFamilyHandleList.get(0), "dfkey2".getBytes(),
......@@ -100,7 +106,8 @@ public class ColumnFamilyTest {
// Test create write to and drop ColumnFamily
ColumnFamilyHandle tmpColumnFamilyHandle = null;
try {
tmpColumnFamilyHandle = db.createColumnFamily("tmpCF");
tmpColumnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("tmpCF", new ColumnFamilyOptions()));
db.put(tmpColumnFamilyHandle, "key".getBytes(), "value".getBytes());
db.dropColumnFamily(tmpColumnFamilyHandle);
tmpColumnFamilyHandle.dispose();
......@@ -215,8 +222,6 @@ public class ColumnFamilyTest {
.equals("value"));
} catch (RocksDBException e) {
assert(false);
} catch (IllegalArgumentException e) {
assert(false);
}
// Test multiget without correct number of column
......
......@@ -17,16 +17,17 @@ public class KeyMayExistTest {
public static void main(String[] args){
RocksDB db;
Options options = new Options();
DBOptions options = new DBOptions();
options.setCreateIfMissing(true)
.setCreateMissingColumnFamilies(true);
try {
// open database using cf names
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfNames.add(new ColumnFamilyDescriptor("default"));
cfNames.add(new ColumnFamilyDescriptor("new_cf"));
db = RocksDB.open(options, DB_PATH, cfNames, columnFamilyHandleList);
assert(columnFamilyHandleList.size()==2);
......
......@@ -7,7 +7,6 @@ package org.rocksdb.test;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import org.rocksdb.*;
public class MergeTest {
......@@ -41,18 +40,22 @@ public class MergeTest {
public static void testCFStringOption()
throws InterruptedException, RocksDBException {
Options opt = new Options();
DBOptions opt = new DBOptions();
opt.setCreateIfMissing(true);
opt.setCreateMissingColumnFamilies(true);
opt.setMergeOperatorName("stringappend");
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfDescr =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperatorName(
"stringappend")));
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperatorName(
"stringappend")));
RocksDB db = RocksDB.open(opt, db_cf_path_string,
cfNames, columnFamilyHandleList);
cfDescr, columnFamilyHandleList);
// writing aa under key
db.put(columnFamilyHandleList.get(1),
......@@ -97,19 +100,23 @@ public class MergeTest {
public static void testCFOperatorOption()
throws InterruptedException, RocksDBException {
Options opt = new Options();
DBOptions opt = new DBOptions();
opt.setCreateIfMissing(true);
opt.setCreateMissingColumnFamilies(true);
StringAppendOperator stringAppendOperator = new StringAppendOperator();
opt.setMergeOperator(stringAppendOperator);
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfDescr =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperator(
stringAppendOperator)));
cfDescr.add(new ColumnFamilyDescriptor("new_cf",
new ColumnFamilyOptions().setMergeOperator(
stringAppendOperator)));
RocksDB db = RocksDB.open(opt, db_path_operator,
cfNames, columnFamilyHandleList);
cfDescr, columnFamilyHandleList);
// writing aa under key
db.put(columnFamilyHandleList.get(1),
......@@ -121,7 +128,10 @@ public class MergeTest {
String strValue = new String(value);
// Test also with createColumnFamily
ColumnFamilyHandle columnFamilyHandle = db.createColumnFamily("new_cf2");
ColumnFamilyHandle columnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf2",
new ColumnFamilyOptions().setMergeOperator(
new StringAppendOperator())));
// writing xx under cfkey2
db.put(columnFamilyHandle, "cfkey2".getBytes(), "xx".getBytes());
// merge yy under cfkey2
......
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb.test;
import org.rocksdb.*;
public class MixedOptionsTest {
static {
RocksDB.loadLibrary();
}
public static void main(String[] args) {
// Set a table factory and check the names
ColumnFamilyOptions cfOptions = new ColumnFamilyOptions();
cfOptions.setTableFormatConfig(new BlockBasedTableConfig().
setFilter(new BloomFilter()));
assert(cfOptions.tableFactoryName().equals(
"BlockBasedTable"));
cfOptions.setTableFormatConfig(new PlainTableConfig());
assert(cfOptions.tableFactoryName().equals("PlainTable"));
// Initialize a dbOptions object from cf options and
// db options
DBOptions dbOptions = new DBOptions();
Options options = new Options(dbOptions, cfOptions);
assert(options.tableFactoryName().equals("PlainTable"));
// Free instances
options.dispose();
options = null;
cfOptions.dispose();
cfOptions = null;
dbOptions.dispose();
dbOptions = null;
System.gc();
System.runFinalization();
// Test Optimize for statements
cfOptions = new ColumnFamilyOptions();
cfOptions.optimizeUniversalStyleCompaction();
cfOptions.optimizeLevelStyleCompaction();
cfOptions.optimizeForPointLookup(1024);
options = new Options();
options.optimizeLevelStyleCompaction();
options.optimizeLevelStyleCompaction(400);
options.optimizeUniversalStyleCompaction();
options.optimizeUniversalStyleCompaction(400);
options.optimizeForPointLookup(1024);
options.prepareForBulkLoad();
System.out.println("Mixed options test passed");
}
}
......@@ -6,10 +6,7 @@
package org.rocksdb.test;
import java.util.Random;
import org.rocksdb.DBOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.Options;
public class OptionsTest {
......@@ -23,208 +20,7 @@ public class OptionsTest {
getPlatformSpecificRandomFactory();
DBOptionsTest.testDBOptions(opt);
{ // WriteBufferSize test
try {
long longValue = rand.nextLong();
opt.setWriteBufferSize(longValue);
assert(opt.writeBufferSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MaxWriteBufferNumber test
int intValue = rand.nextInt();
opt.setMaxWriteBufferNumber(intValue);
assert(opt.maxWriteBufferNumber() == intValue);
}
{ // MinWriteBufferNumberToMerge test
int intValue = rand.nextInt();
opt.setMinWriteBufferNumberToMerge(intValue);
assert(opt.minWriteBufferNumberToMerge() == intValue);
}
{ // NumLevels test
int intValue = rand.nextInt();
opt.setNumLevels(intValue);
assert(opt.numLevels() == intValue);
}
{ // LevelFileNumCompactionTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroFileNumCompactionTrigger(intValue);
assert(opt.levelZeroFileNumCompactionTrigger() == intValue);
}
{ // LevelSlowdownWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroSlowdownWritesTrigger(intValue);
assert(opt.levelZeroSlowdownWritesTrigger() == intValue);
}
{ // LevelStopWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroStopWritesTrigger(intValue);
assert(opt.levelZeroStopWritesTrigger() == intValue);
}
{ // MaxMemCompactionLevel test
int intValue = rand.nextInt();
opt.setMaxMemCompactionLevel(intValue);
assert(opt.maxMemCompactionLevel() == intValue);
}
{ // TargetFileSizeBase test
long longValue = rand.nextLong();
opt.setTargetFileSizeBase(longValue);
assert(opt.targetFileSizeBase() == longValue);
}
{ // TargetFileSizeMultiplier test
int intValue = rand.nextInt();
opt.setTargetFileSizeMultiplier(intValue);
assert(opt.targetFileSizeMultiplier() == intValue);
}
{ // MaxBytesForLevelBase test
long longValue = rand.nextLong();
opt.setMaxBytesForLevelBase(longValue);
assert(opt.maxBytesForLevelBase() == longValue);
}
{ // MaxBytesForLevelMultiplier test
int intValue = rand.nextInt();
opt.setMaxBytesForLevelMultiplier(intValue);
assert(opt.maxBytesForLevelMultiplier() == intValue);
}
{ // ExpandedCompactionFactor test
int intValue = rand.nextInt();
opt.setExpandedCompactionFactor(intValue);
assert(opt.expandedCompactionFactor() == intValue);
}
{ // SourceCompactionFactor test
int intValue = rand.nextInt();
opt.setSourceCompactionFactor(intValue);
assert(opt.sourceCompactionFactor() == intValue);
}
{ // MaxGrandparentOverlapFactor test
int intValue = rand.nextInt();
opt.setMaxGrandparentOverlapFactor(intValue);
assert(opt.maxGrandparentOverlapFactor() == intValue);
}
{ // SoftRateLimit test
double doubleValue = rand.nextDouble();
opt.setSoftRateLimit(doubleValue);
assert(opt.softRateLimit() == doubleValue);
}
{ // HardRateLimit test
double doubleValue = rand.nextDouble();
opt.setHardRateLimit(doubleValue);
assert(opt.hardRateLimit() == doubleValue);
}
{ // RateLimitDelayMaxMilliseconds test
int intValue = rand.nextInt();
opt.setRateLimitDelayMaxMilliseconds(intValue);
assert(opt.rateLimitDelayMaxMilliseconds() == intValue);
}
{ // ArenaBlockSize test
try {
long longValue = rand.nextLong();
opt.setArenaBlockSize(longValue);
assert(opt.arenaBlockSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // DisableAutoCompactions test
boolean boolValue = rand.nextBoolean();
opt.setDisableAutoCompactions(boolValue);
assert(opt.disableAutoCompactions() == boolValue);
}
{ // PurgeRedundantKvsWhileFlush test
boolean boolValue = rand.nextBoolean();
opt.setPurgeRedundantKvsWhileFlush(boolValue);
assert(opt.purgeRedundantKvsWhileFlush() == boolValue);
}
{ // VerifyChecksumsInCompaction test
boolean boolValue = rand.nextBoolean();
opt.setVerifyChecksumsInCompaction(boolValue);
assert(opt.verifyChecksumsInCompaction() == boolValue);
}
{ // FilterDeletes test
boolean boolValue = rand.nextBoolean();
opt.setFilterDeletes(boolValue);
assert(opt.filterDeletes() == boolValue);
}
{ // MaxSequentialSkipInIterations test
long longValue = rand.nextLong();
opt.setMaxSequentialSkipInIterations(longValue);
assert(opt.maxSequentialSkipInIterations() == longValue);
}
{ // InplaceUpdateSupport test
boolean boolValue = rand.nextBoolean();
opt.setInplaceUpdateSupport(boolValue);
assert(opt.inplaceUpdateSupport() == boolValue);
}
{ // InplaceUpdateNumLocks test
try {
long longValue = rand.nextLong();
opt.setInplaceUpdateNumLocks(longValue);
assert(opt.inplaceUpdateNumLocks() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MemtablePrefixBloomBits test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomBits(intValue);
assert(opt.memtablePrefixBloomBits() == intValue);
}
{ // MemtablePrefixBloomProbes test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomProbes(intValue);
assert(opt.memtablePrefixBloomProbes() == intValue);
}
{ // BloomLocality test
int intValue = rand.nextInt();
opt.setBloomLocality(intValue);
assert(opt.bloomLocality() == intValue);
}
{ // MaxSuccessiveMerges test
try {
long longValue = rand.nextLong();
opt.setMaxSuccessiveMerges(longValue);
assert(opt.maxSuccessiveMerges() == longValue);
} catch (RocksDBException e){
assert(false);
}
}
{ // MinPartialMergeOperands test
int intValue = rand.nextInt();
opt.setMinPartialMergeOperands(intValue);
assert(opt.minPartialMergeOperands() == intValue);
}
ColumnFamilyOptionsTest.testCFOptions(opt);
opt.dispose();
System.out.println("Passed OptionsTest");
......
......@@ -34,12 +34,15 @@ public class ReadOnlyTest {
db2.close();
List<String> cfNames = new ArrayList<String>();
cfNames.add("default");
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<ColumnFamilyDescriptor>();
cfNames.add(new ColumnFamilyDescriptor("default"));
db = RocksDB.open(DB_PATH, cfNames, columnFamilyHandleList);
columnFamilyHandleList.add(db.createColumnFamily("new_cf"));
columnFamilyHandleList.add(db.createColumnFamily("new_cf2"));
columnFamilyHandleList.add(db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf", new ColumnFamilyOptions())));
columnFamilyHandleList.add(db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf2", new ColumnFamilyOptions())));
db.put(columnFamilyHandleList.get(2), "key2".getBytes(),
"value2".getBytes());
......@@ -47,9 +50,10 @@ public class ReadOnlyTest {
assert(db2.get("key2".getBytes())==null);
assert(db2.get(columnFamilyHandleList.get(0), "key2".getBytes())==null);
List<String> cfNewName = new ArrayList<String>();
cfNewName.add("default");
cfNewName.add("new_cf2");
List<ColumnFamilyDescriptor> cfNewName =
new ArrayList<ColumnFamilyDescriptor>();
cfNewName.add(new ColumnFamilyDescriptor("default"));
cfNewName.add(new ColumnFamilyDescriptor("new_cf2"));
db3 = RocksDB.openReadOnly(DB_PATH, cfNewName, db3ColumnFamilyHandleList);
assert(new String(db3.get(db3ColumnFamilyHandleList.get(1),
"key2".getBytes())).equals("value2"));
......
......@@ -12,9 +12,8 @@
#include <memory>
#include "include/org_rocksdb_Options.h"
//TODO(fyrz) to be commented in with options refactoring pull requests
#include "include/org_rocksdb_DBOptions.h"
//#include "include/org_rocksdb_ColumnFamilyOptions.h"
#include "include/org_rocksdb_ColumnFamilyOptions.h"
#include "include/org_rocksdb_WriteOptions.h"
#include "include/org_rocksdb_ReadOptions.h"
#include "include/org_rocksdb_ComparatorOptions.h"
......@@ -1740,9 +1739,8 @@ void Java_org_rocksdb_Options_prepareForBulkLoad(
*/
void Java_org_rocksdb_ColumnFamilyOptions_newColumnFamilyOptions(
JNIEnv* env, jobject jobj) {
// TODO(fyrz) needs to be enabled back when ColumnFamilyOptions are available
// rocksdb::ColumnFamilyOptions* op = new rocksdb::ColumnFamilyOptions();
// rocksdb::ColumnFamilyOptionsJni::setHandle(env, jobj, op);
rocksdb::ColumnFamilyOptions* op = new rocksdb::ColumnFamilyOptions();
rocksdb::ColumnFamilyOptionsJni::setHandle(env, jobj, op);
}
/*
......
......@@ -159,6 +159,34 @@ class DBOptionsJni {
}
};
class ColumnFamilyDescriptorJni {
public:
// Get the java class id of org.rocksdb.ColumnFamilyDescriptor
static jclass getColumnFamilyDescriptorClass(JNIEnv* env) {
jclass jclazz = env->FindClass("org/rocksdb/ColumnFamilyDescriptor");
assert(jclazz != nullptr);
return jclazz;
}
// Get the java method id of columnFamilyName
static jmethodID getColumnFamilyNameMethod(JNIEnv* env) {
static jmethodID mid = env->GetMethodID(
getColumnFamilyDescriptorClass(env),
"columnFamilyName", "()Ljava/lang/String;");
assert(mid != nullptr);
return mid;
}
// Get the java method id of columnFamilyOptions
static jmethodID getColumnFamilyOptionsMethod(JNIEnv* env) {
static jmethodID mid = env->GetMethodID(
getColumnFamilyDescriptorClass(env),
"columnFamilyOptions", "()Lorg/rocksdb/ColumnFamilyOptions;");
assert(mid != nullptr);
return mid;
}
};
class ColumnFamilyOptionsJni {
public:
// Get the java class id of org.rocksdb.ColumnFamilyOptions.
......
......@@ -69,7 +69,7 @@ void Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2(
jobject
Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2Ljava_util_List_2I(
JNIEnv* env, jobject jdb, jlong jopt_handle, jstring jdb_path,
jobject jcfname_list, jint jcfname_count) {
jobject jcfdesc_list, jint jcfdesc_count) {
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
rocksdb::DB* db = nullptr;
const char* db_path = env->GetStringUTFChars(jdb_path, 0);
......@@ -79,23 +79,34 @@ jobject
std::vector<rocksdb::ColumnFamilyDescriptor> column_families;
std::vector<rocksdb::ColumnFamilyHandle* > handles;
// get iterator for cfnames
// get iterator for ColumnFamilyDescriptors
jobject iteratorObj = env->CallObjectMethod(
jcfname_list, rocksdb::ListJni::getIteratorMethod(env));
jcfdesc_list, rocksdb::ListJni::getIteratorMethod(env));
// iterate over cfnames and convert cfnames to
// ColumnFamilyDescriptor instances
// iterate over ColumnFamilyDescriptors
while (env->CallBooleanMethod(
iteratorObj, rocksdb::ListJni::getHasNextMethod(env)) == JNI_TRUE) {
jstring jstr = (jstring) env->CallObjectMethod(iteratorObj,
// get ColumnFamilyDescriptor
jobject jcf_descriptor = env->CallObjectMethod(iteratorObj,
rocksdb::ListJni::getNextMethod(env));
// get ColumnFamilyName
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
// free allocated cfnames after call to open
cfnames_to_free.push_back(cfname);
jcfnames_for_free.push_back(jstr);
column_families.push_back(rocksdb::ColumnFamilyDescriptor(cfname,
*static_cast<rocksdb::ColumnFamilyOptions*>(opt)));
*cfOptions));
}
rocksdb::Status s = rocksdb::DB::OpenForReadOnly(*opt,
......@@ -141,7 +152,7 @@ jobject
*/
jobject Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2Ljava_util_List_2I(
JNIEnv* env, jobject jdb, jlong jopt_handle, jstring jdb_path,
jobject jcfname_list, jint jcfname_count) {
jobject jcfdesc_list, jint jcfdesc_count) {
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
rocksdb::DB* db = nullptr;
const char* db_path = env->GetStringUTFChars(jdb_path, 0);
......@@ -151,23 +162,34 @@ jobject Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2Ljava_util_List_2I(
std::vector<rocksdb::ColumnFamilyDescriptor> column_families;
std::vector<rocksdb::ColumnFamilyHandle* > handles;
// get iterator for cfnames
// get iterator for ColumnFamilyDescriptors
jobject iteratorObj = env->CallObjectMethod(
jcfname_list, rocksdb::ListJni::getIteratorMethod(env));
jcfdesc_list, rocksdb::ListJni::getIteratorMethod(env));
// iterate over cfnames and convert cfnames to
// ColumnFamilyDescriptor instances
// iterate over ColumnFamilyDescriptors
while (env->CallBooleanMethod(
iteratorObj, rocksdb::ListJni::getHasNextMethod(env)) == JNI_TRUE) {
jstring jstr = (jstring) env->CallObjectMethod(iteratorObj,
// get ColumnFamilyDescriptor
jobject jcf_descriptor = env->CallObjectMethod(iteratorObj,
rocksdb::ListJni::getNextMethod(env));
// get ColumnFamilyName
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
// free allocated cfnames after call to open
cfnames_to_free.push_back(cfname);
jcfnames_for_free.push_back(jstr);
column_families.push_back(rocksdb::ColumnFamilyDescriptor(cfname,
*static_cast<rocksdb::ColumnFamilyOptions*>(opt)));
*cfOptions));
}
rocksdb::Status s = rocksdb::DB::Open(*opt, db_path, column_families,
......@@ -1151,18 +1173,28 @@ jlongArray Java_org_rocksdb_RocksDB_iterators(
/*
* Class: org_rocksdb_RocksDB
* Method: createColumnFamily
* Signature: (JJLjava/lang/String;)J;
* Signature: (JLorg/rocksdb/ColumnFamilyDescriptor;)J;
*/
jlong Java_org_rocksdb_RocksDB_createColumnFamily(
JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jopt_handle,
jstring jcfname) {
JNIEnv* env, jobject jdb, jlong jdb_handle,
jobject jcf_descriptor) {
rocksdb::ColumnFamilyHandle* handle;
const char* cfname = env->GetStringUTFChars(jcfname, 0);
auto db_handle = reinterpret_cast<rocksdb::DB*>(jdb_handle);
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
rocksdb::Status s = db_handle->CreateColumnFamily(
*static_cast<rocksdb::ColumnFamilyOptions*>(opt), cfname, &handle);
env->ReleaseStringUTFChars(jcfname, cfname);
*cfOptions, cfname, &handle);
env->ReleaseStringUTFChars(jstr, cfname);
if (s.ok()) {
return reinterpret_cast<jlong>(handle);
......