提交 52eb06cb 编写于 作者: V Vlad Ilyushchenko

MultiMap, prepared for resampling and joins

上级 e01eab3d
...@@ -18,25 +18,33 @@ package com.nfsdb.journal.collections; ...@@ -18,25 +18,33 @@ package com.nfsdb.journal.collections;
import com.nfsdb.journal.column.ColumnType; import com.nfsdb.journal.column.ColumnType;
import com.nfsdb.journal.exceptions.JournalRuntimeException; import com.nfsdb.journal.exceptions.JournalRuntimeException;
import com.nfsdb.journal.factory.configuration.ColumnMetadata;
import com.nfsdb.journal.lang.cst.AbstractDataRow;
import com.nfsdb.journal.lang.cst.DataRow;
import com.nfsdb.journal.utils.Hash; import com.nfsdb.journal.utils.Hash;
import com.nfsdb.journal.utils.Unsafe; import com.nfsdb.journal.utils.Unsafe;
import java.io.Closeable; import java.io.Closeable;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompositeKeyIntMap.Entry> { public class MultiMap implements Closeable, Iterable<MultiMap.Record> {
private final int seed = 0xdeadbeef; private final int seed = 0xdeadbeef;
private final double loadFactor; private final float loadFactor;
private final Key key = new Key(); private final Key key = new Key();
private final Entry entry = new Entry(); private final Values values0 = new Values();
private final EntryIterator iterator = new EntryIterator(); private final Record record = new Record();
private final ColumnType[] keyColumnTypes; private final RecordIterator iterator = new RecordIterator();
private final ColumnType[] valueColumnTypes; private final List<ColumnMetadata> keyColumns;
private final List<ColumnMetadata> valueColumns;
private final int valueOffsets[]; private final int valueOffsets[];
private int valueBlockLen; private final int columnSplit;
private int keyBlockOffset;
private int keyDataOffset; private int keyDataOffset;
private DirectIntList values;
private DirectLongList keyOffsets; private DirectLongList keyOffsets;
private long kAddress; private long kAddress;
private long kStart; private long kStart;
...@@ -44,34 +52,31 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -44,34 +52,31 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
private long kPos; private long kPos;
private int free; private int free;
private long keyCapacity; private long keyCapacity;
private int size = 0;
public DirectCompositeKeyIntMap(ColumnType[] keyColumnTypes, ColumnType[] valueColumnTypes) { private MultiMap(long capacity, long dataSize, float loadFactor, List<ColumnMetadata> valueColumns, List<ColumnMetadata> keyColumns) {
this(67, 4 * 1024, 0.5d, keyColumnTypes, valueColumnTypes);
}
public DirectCompositeKeyIntMap(long capacity, long keyAreaCapacity, double loadFactor, ColumnType[] keyColumnTypes, ColumnType[] valueColumnTypes) {
this.loadFactor = loadFactor; this.loadFactor = loadFactor;
this.kAddress = Unsafe.getUnsafe().allocateMemory(keyAreaCapacity + AbstractDirectList.CACHE_LINE_SIZE); this.kAddress = Unsafe.getUnsafe().allocateMemory(dataSize + AbstractDirectList.CACHE_LINE_SIZE);
this.kStart = kPos = this.kAddress + (this.kAddress & (AbstractDirectList.CACHE_LINE_SIZE - 1)); this.kStart = kPos = this.kAddress + (this.kAddress & (AbstractDirectList.CACHE_LINE_SIZE - 1));
this.kLimit = kStart + keyAreaCapacity; this.kLimit = kStart + dataSize;
this.keyCapacity = Primes.next((long) (capacity / loadFactor)); this.keyCapacity = Primes.next((long) (capacity / loadFactor));
this.free = (int) (keyCapacity * loadFactor); this.free = (int) (keyCapacity * loadFactor);
this.keyOffsets = new DirectLongList(keyCapacity); this.keyOffsets = new DirectLongList(keyCapacity);
this.keyOffsets.zero((byte) -1); this.keyOffsets.zero((byte) -1);
this.keyOffsets.setPos(keyCapacity); this.keyOffsets.setPos(keyCapacity);
this.values = new DirectIntList(keyCapacity); this.keyColumns = keyColumns;
this.keyColumnTypes = keyColumnTypes; this.valueColumns = valueColumns;
this.valueColumnTypes = valueColumnTypes; this.columnSplit = valueColumns.size();
this.valueOffsets = new int[valueColumnTypes.length]; this.valueOffsets = new int[columnSplit];
calValueOffsets(); calValueOffsets();
} }
private void calValueOffsets() { private void calValueOffsets() {
int offset = 0; int offset = 4;
for (int i = 0; i < valueOffsets.length; i++) { for (int i = 0; i < valueOffsets.length; i++) {
valueOffsets[i] = offset; valueOffsets[i] = offset;
switch (valueColumnTypes[i]) { switch (valueColumns.get(i).type) {
case INT: case INT:
offset += 4; offset += 4;
break; break;
...@@ -81,64 +86,70 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -81,64 +86,70 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
offset += 8; offset += 8;
break; break;
default: default:
throw new JournalRuntimeException("value type is not supported: " + valueColumnTypes[i]); throw new JournalRuntimeException("value type is not supported: " + valueColumns.get(i));
} }
} }
this.valueBlockLen = offset; this.keyBlockOffset = offset;
this.keyDataOffset = 4 + offset + 4 * valueColumnTypes.length; this.keyDataOffset = this.keyBlockOffset + 4 * keyColumns.size();
} }
public void put(Key key, int v) {
long index = Hash.hashXX(key.startAddr, key.len, seed) % keyCapacity; public Values claimSlot(Key key) {
// calculate hash remembering "key" structure
// [ len | value block | key offset block | key data block ]
long h = Hash.hashXX(key.startAddr + keyBlockOffset, key.len - keyBlockOffset, seed);
long index = h % keyCapacity;
long offset = keyOffsets.get(index); long offset = keyOffsets.get(index);
if (offset == -1) { if (offset == -1) {
keyOffsets.set(index, key.startAddr - kStart); keyOffsets.set(index, key.startAddr - kStart);
values.set(index, v);
if (--free == 0) { if (--free == 0) {
rehash(); rehash();
} }
size++;
return values0.beginRead(key.startAddr, true);
} else if (eq(key, offset)) { } else if (eq(key, offset)) {
values.set(index, v);
// rollback added key // rollback added key
kPos = key.startAddr; kPos = key.startAddr;
return values0.beginRead(kStart + offset, false);
} else { } else {
probe(key, index, v); return probe0(key, index);
} }
} }
private void probe(Key key, long index, int v) { private Values probe0(Key key, long index) {
long offset; long offset;
while ((offset = keyOffsets.get(index = (++index % keyCapacity))) != -1) { while ((offset = keyOffsets.get(index = (++index % keyCapacity))) != -1) {
if (eq(key, offset)) { if (eq(key, offset)) {
values.set(index, v); kPos = key.startAddr;
return; return values0.beginRead(kStart + offset, false);
} }
} }
keyOffsets.set(index, key.startAddr - kStart); keyOffsets.set(index, key.startAddr - kStart);
values.set(index, v);
free--; free--;
if (free == 0) { if (free == 0) {
rehash(); rehash();
} }
}
public Iterator<Entry> iterator() { size++;
iterator.index = 0; return values0.beginRead(key.startAddr, true);
return iterator;
} }
private boolean eq(Key key, long offset) { private boolean eq(Key key, long offset) {
long a = kStart + offset; long a = kStart + offset;
long b = key.startAddr; long b = key.startAddr;
// check length first
if (Unsafe.getUnsafe().getInt(a) != Unsafe.getUnsafe().getInt(b)) { if (Unsafe.getUnsafe().getInt(a) != Unsafe.getUnsafe().getInt(b)) {
return false; return false;
} }
long lim = b + key.len; long lim = b + key.len;
// skip to the data
a += keyBlockOffset;
b += keyBlockOffset;
while (b < lim - 8) { while (b < lim - 8) {
if (Unsafe.getUnsafe().getLong(a) != Unsafe.getUnsafe().getLong(b)) { if (Unsafe.getUnsafe().getLong(a) != Unsafe.getUnsafe().getLong(b)) {
return false; return false;
...@@ -155,7 +166,7 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -155,7 +166,7 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
return true; return true;
} }
public Key withKey() { public Key claimKey() {
return key.beginWrite(); return key.beginWrite();
} }
...@@ -169,7 +180,6 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -169,7 +180,6 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
key.startAddr = kStart + (key.startAddr - this.kStart); key.startAddr = kStart + (key.startAddr - this.kStart);
key.appendAddr = kStart + (key.appendAddr - this.kStart); key.appendAddr = kStart + (key.appendAddr - this.kStart);
key.dataAddr = kStart + (key.dataAddr - this.kStart);
key.nextColOffset = kStart + (key.nextColOffset - this.kStart); key.nextColOffset = kStart + (key.nextColOffset - this.kStart);
...@@ -182,26 +192,21 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -182,26 +192,21 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
long capacity = Primes.next(keyCapacity << 1); long capacity = Primes.next(keyCapacity << 1);
DirectLongList pointers = new DirectLongList(capacity); DirectLongList pointers = new DirectLongList(capacity);
pointers.zero((byte) -1); pointers.zero((byte) -1);
DirectIntList values = new DirectIntList(capacity);
pointers.setPos(capacity); pointers.setPos(capacity);
values.setPos(capacity);
for (int i = 0, sz = this.keyOffsets.size(); i < sz; i++) { for (int i = 0, sz = this.keyOffsets.size(); i < sz; i++) {
long offset = this.keyOffsets.get(i); long offset = this.keyOffsets.get(i);
if (offset == -1) { if (offset == -1) {
continue; continue;
} }
long index = Hash.hashXX(offset + 4 + kStart, Unsafe.getUnsafe().getInt(kStart + offset), seed) % capacity; long index = Hash.hashXX(kStart + offset + keyBlockOffset, Unsafe.getUnsafe().getInt(kStart + offset) - keyBlockOffset, seed) % capacity;
while (pointers.get(index) != -1) { while (pointers.get(index) != -1) {
index = (index + 1) % capacity; index = (index + 1) % capacity;
} }
pointers.set(index, offset); pointers.set(index, offset);
values.set(index, this.values.get(i));
} }
this.keyOffsets.free(); this.keyOffsets.free();
this.values.free();
this.keyOffsets = pointers; this.keyOffsets = pointers;
this.values = values;
this.free += (capacity - keyCapacity) * loadFactor; this.free += (capacity - keyCapacity) * loadFactor;
this.keyCapacity = capacity; this.keyCapacity = capacity;
} }
...@@ -211,79 +216,67 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -211,79 +216,67 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
Unsafe.getUnsafe().freeMemory(kAddress); Unsafe.getUnsafe().freeMemory(kAddress);
kAddress = 0; kAddress = 0;
} }
values.free();
keyOffsets.free(); keyOffsets.free();
} }
@Override
public Iterator<Record> iterator() {
iterator.address = kStart;
iterator.count = size;
return iterator;
}
@Override @Override
public void close() { public void close() {
free(); free();
} }
public class Entry { public int size() {
public final Key key = DirectCompositeKeyIntMap.this.key; return size;
public int value;
} }
public class EntryIterator extends AbstractImmutableIterator<Entry> { public static class Builder {
private final List<ColumnMetadata> valueColumns = new ArrayList<>();
private final List<ColumnMetadata> keyColumns = new ArrayList<>();
private long capacity = 67;
private long dataSize = 4096;
private float loadFactor = 0.5f;
private long index; public Builder valueColumn(ColumnMetadata metadata) {
valueColumns.add(metadata);
return this;
}
@Override public Builder keyColumn(ColumnMetadata metadata) {
public boolean hasNext() { keyColumns.add(metadata);
if (index >= keyCapacity) { return this;
return false; }
}
long offset = -1; public Builder setCapacity(long capacity) {
while (index < keyCapacity && (offset = keyOffsets.get(index)) == -1) { this.capacity = capacity;
index++; return this;
} }
if (offset != -1) { public Builder setDataSize(long dataSize) {
entry.value = values.get(index++); this.dataSize = dataSize;
entry.key.beginRead((int) offset); return this;
return true; }
}
return false; public Builder setLoadFactor(float loadFactor) {
this.loadFactor = loadFactor;
return this;
} }
@Override public MultiMap build() {
public Entry next() { return new MultiMap(capacity, dataSize, loadFactor, valueColumns, keyColumns);
return entry;
} }
} }
/**
* Column count is fixed. Key structure:
* <pre>
* len[4] | column2 offset [4] | column 3 offset [4] ... | data1 | data2 ...
*
* To offset of column 0 data skip all of the header information:
*
* offset = 4 + columnCount * 4
*
* To get offset of column 1 and onwards:
*
* offset = 4 + (columnIndex - 1) * 4
*
* To get length of column 0:
*
* len = column1Offset - 4 - 4 * columnCount
*
* To get length of column 1 and onwards:
*
* len = column2Offset - column1Offset
* </pre>
*/
public class Key { public class Key {
private long startAddr; private long startAddr;
private long dataAddr;
private long appendAddr; private long appendAddr;
private int len; private int len;
private char[] strBuf = null;
private long nextColOffset; private long nextColOffset;
private void checkSize(int size) { private void checkSize(int size) {
...@@ -292,10 +285,6 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -292,10 +285,6 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
} }
} }
public long getLong(int index) {
return Unsafe.getUnsafe().getLong(getColumnAddress(index));
}
public Key putLong(long value) { public Key putLong(long value) {
checkSize(8); checkSize(8);
Unsafe.getUnsafe().putLong(appendAddr, value); Unsafe.getUnsafe().putLong(appendAddr, value);
...@@ -312,7 +301,7 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -312,7 +301,7 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
} }
private void writeOffset() { private void writeOffset() {
Unsafe.getUnsafe().putInt(nextColOffset, (int) (appendAddr - dataAddr)); Unsafe.getUnsafe().putInt(nextColOffset, (int) (appendAddr - startAddr));
nextColOffset += 4; nextColOffset += 4;
} }
...@@ -324,45 +313,197 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo ...@@ -324,45 +313,197 @@ public class DirectCompositeKeyIntMap implements Closeable, Iterable<DirectCompo
} }
appendAddr += len << 1; appendAddr += len << 1;
writeOffset(); writeOffset();
return this;
}
public Key $() {
Unsafe.getUnsafe().putInt(startAddr, len = (int) (appendAddr - startAddr));
kPos = appendAddr;
return this; return this;
} }
private long getColumnAddress(int index) { public Key beginWrite() {
if (index == 0) { startAddr = kPos;
return dataAddr; appendAddr = startAddr + keyDataOffset;
} else { nextColOffset = startAddr + keyBlockOffset;
return Unsafe.getUnsafe().getInt(startAddr + 4 + valueBlockLen + (index - 1) * 4) + dataAddr; return this;
}
}
/**
 * Reusable view over the fixed-size value block of a map entry.
 * A single instance is shared; the enclosing map repositions it via
 * beginRead() each time a slot is claimed or found.
 */
public class Values {
    // Base address of the entry this view currently points at.
    public long address;
    // True when the last claim created the slot, false when it already existed.
    private boolean newSlot;

    public void putDouble(int index, double value) {
        Unsafe.getUnsafe().putDouble(columnAddress(index), value);
    }

    public double getDouble(int index) {
        return Unsafe.getUnsafe().getDouble(columnAddress(index));
    }

    public void putInt(int index, int value) {
        Unsafe.getUnsafe().putInt(columnAddress(index), value);
    }

    public int getInt(int index) {
        return Unsafe.getUnsafe().getInt(columnAddress(index));
    }

    public boolean isNew() {
        return newSlot;
    }

    // Absolute address of value column "index" within the current entry.
    private long columnAddress(int index) {
        return address + valueOffsets[index];
    }

    // Repositions this reusable view; invoked by the enclosing map only.
    private Values beginRead(long address, boolean newSlot) {
        this.address = address;
        this.newSlot = newSlot;
        return this;
    }
}
public class Record extends AbstractDataRow {
private long address;
private char[] strBuf = null;
private ObjIntHashMap<String> nameCache;
private long address0(int index) {
if (index < columnSplit) {
return address + valueOffsets[index];
}
if (index == columnSplit) {
return address + keyDataOffset;
} }
return Unsafe.getUnsafe().getInt(address + keyBlockOffset + (index - columnSplit - 1) * 4) + address;
} }
private Record init(long address) {
this.address = address;
return this;
}
// Total column count: value columns are indexed first, then key columns.
@Override
public int getColumnCount() {
return valueColumns.size() + keyColumns.size();
}
// Reads a double straight from off-heap memory at the column's address.
@Override
public double getDouble(int index) {
return Unsafe.getUnsafe().getDouble(address0(index));
}
// Reads a long straight from off-heap memory at the column's address.
@Override
public long getLong(int index) {
return Unsafe.getUnsafe().getLong(address0(index));
}
// Reads an int straight from off-heap memory at the column's address.
@Override
public int getInt(int index) {
return Unsafe.getUnsafe().getInt(address0(index));
}
@Override
public String getStr(int index) { public String getStr(int index) {
long address = getColumnAddress(index); long address = address0(index);
int len = (int) (getColumnAddress(index + 1) - address) >> 1; int len = (int) (address0(index + 1) - address) >> 1;
if (strBuf == null || strBuf.length < len) { if (strBuf == null || strBuf.length < len) {
strBuf = new char[len]; strBuf = new char[len];
} }
Unsafe.getUnsafe().copyMemory(null, address, strBuf, sun.misc.Unsafe.ARRAY_CHAR_BASE_OFFSET, ((long) len) << 1); Unsafe.getUnsafe().copyMemory(null, address, strBuf, sun.misc.Unsafe.ARRAY_CHAR_BASE_OFFSET, ((long) len) << 1);
return new String(strBuf); return new String(strBuf, 0, len);
} }
public Key $() { @Override
Unsafe.getUnsafe().putInt(startAddr, len = (int) (appendAddr - startAddr)); public int getColumnIndex(String column) {
kPos = appendAddr; if (nameCache == null) {
return this; populateNameCache();
}
return nameCache.get(column);
} }
public Key beginWrite() { private void populateNameCache() {
startAddr = kPos; nameCache = new ObjIntHashMap<>();
dataAddr = appendAddr = startAddr + keyDataOffset; for (int i = 0, valueColumnsSize = valueColumns.size(); i < valueColumnsSize; i++) {
nextColOffset = startAddr + 4 + valueBlockLen; nameCache.put(valueColumns.get(i).name, i);
return this; }
for (int i = 0, keyColumnsSize = keyColumns.size(); i < keyColumnsSize; i++) {
ColumnMetadata m = keyColumns.get(i);
nameCache.put(m.name, i + columnSplit);
}
} }
public Key beginRead(int offset) { @Override
startAddr = kStart + offset; protected ColumnType getColumnTypeInternal(int x) {
dataAddr = startAddr + keyDataOffset; if (x < columnSplit) {
return this; return valueColumns.get(x).type;
} else {
return keyColumns.get(x - columnSplit).type;
}
}
@Override
public long getDate(int index) {
return Unsafe.getUnsafe().getLong(address0(index));
}
@Override
public boolean getBool(int index) {
return Unsafe.getUnsafe().getByte(address0(index)) == 1;
}
@Override
public short getShort(int index) {
return Unsafe.getUnsafe().getShort(address0(index));
}
@Override
public DataRow getSlave() {
return null;
}
@Override
public byte get(int index) {
return Unsafe.getUnsafe().getByte(address0(index));
}
@Override
public void getBin(int col, OutputStream s) {
throw new JournalRuntimeException("Not implemented");
}
@Override
public InputStream getBin(int col) {
throw new JournalRuntimeException("Not implemented");
}
@Override
public String getSym(int index) {
return getStr(index);
}
}
public class RecordIterator extends AbstractImmutableIterator<Record> {
private int count;
private long address;
@Override
public boolean hasNext() {
return count > 0;
}
@Override
public Record next() {
long address = this.address;
this.address = address + Unsafe.getUnsafe().getInt(address);
count--;
return record.init(address);
} }
} }
} }
\ No newline at end of file
...@@ -62,7 +62,7 @@ public class FlexBufferSink implements CharSink { ...@@ -62,7 +62,7 @@ public class FlexBufferSink implements CharSink {
try { try {
buffer.flip(); buffer.flip();
channel.write(buffer); channel.write(buffer);
buffer.rewind(); buffer.clear();
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
} }
......
/* /*
* Copyright (c) 2014-2015. Vlad Ilyushchenko * Copyright (c) 2014. Vlad Ilyushchenko
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -16,12 +16,12 @@ ...@@ -16,12 +16,12 @@
package com.nfsdb.journal.export; package com.nfsdb.journal.export;
import com.nfsdb.journal.factory.configuration.ColumnMetadata; import com.nfsdb.journal.lang.cst.DataRow;
import com.nfsdb.journal.lang.cst.EntrySource;
import com.nfsdb.journal.lang.cst.JournalEntry;
import com.nfsdb.journal.utils.Dates; import com.nfsdb.journal.utils.Dates;
import com.nfsdb.journal.utils.Numbers; import com.nfsdb.journal.utils.Numbers;
import java.util.Iterator;
public class JournalEntryPrinter { public class JournalEntryPrinter {
private final boolean enabled; private final boolean enabled;
private final CharSink sink; private final CharSink sink;
...@@ -31,10 +31,15 @@ public class JournalEntryPrinter { ...@@ -31,10 +31,15 @@ public class JournalEntryPrinter {
this.enabled = enabled; this.enabled = enabled;
} }
public void print(JournalEntry e) { public void print(DataRow e) {
for (int i = 0, sz = e.partition.getJournal().getMetadata().getColumnCount(); i < sz; i++) { if (e == null) {
ColumnMetadata m = e.partition.getJournal().getMetadata().getColumnMetadata(i); sink.put("\n");
switch (m.type) { sink.flush();
return;
}
for (int i = 0, sz = e.getColumnCount(); i < sz; i++) {
switch (e.getColumnType(i)) {
case DATE: case DATE:
Dates.appendDateTime(sink, e.getLong(i)); Dates.appendDateTime(sink, e.getLong(i));
break; break;
...@@ -65,21 +70,16 @@ public class JournalEntryPrinter { ...@@ -65,21 +70,16 @@ public class JournalEntryPrinter {
} }
sink.put('\t'); sink.put('\t');
} }
if (e.slave != null) { print(e.getSlave());
print(e.slave);
} else {
sink.put('\n');
sink.flush();
}
} }
public void print(EntrySource src) { public <X extends DataRow> void print(Iterator<X> src) {
if (!enabled) { if (!enabled) {
return; return;
} }
for (JournalEntry e : src) { while (src.hasNext()) {
print(e); print(src.next());
} }
} }
} }
/*
* Copyright (c) 2014. Vlad Ilyushchenko
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nfsdb.journal.lang.cst;
import com.nfsdb.journal.column.ColumnType;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * Convenience base for {@link DataRow} implementations: every name-based
 * accessor is routed to its index-based counterpart through
 * {@link #getColumnIndex(String)}, and column types are resolved once per
 * column and cached.
 */
public abstract class AbstractDataRow implements DataRow {

    // Lazily allocated per-column type cache, sized by getColumnCount().
    private ColumnType[] typeCache;

    @Override
    public byte get(String column) {
        return get(getColumnIndex(column));
    }

    @Override
    public int getInt(String column) {
        return getInt(getColumnIndex(column));
    }

    @Override
    public long getLong(String column) {
        return getLong(getColumnIndex(column));
    }

    @Override
    public double getDouble(String column) {
        return getDouble(getColumnIndex(column));
    }

    @Override
    public CharSequence getStr(String column) {
        return getStr(getColumnIndex(column));
    }

    @Override
    public String getSym(String column) {
        return getSym(getColumnIndex(column));
    }

    @Override
    public boolean getBool(String column) {
        return getBool(getColumnIndex(column));
    }

    @Override
    public void getBin(String column, OutputStream s) {
        getBin(getColumnIndex(column), s);
    }

    @Override
    public InputStream getBin(String column) {
        return getBin(getColumnIndex(column));
    }

    @Override
    public ColumnType getColumnType(int x) {
        if (typeCache == null) {
            typeCache = new ColumnType[getColumnCount()];
        }
        ColumnType type = typeCache[x];
        if (type == null) {
            type = getColumnTypeInternal(x);
            typeCache[x] = type;
        }
        return type;
    }

    // Subclasses resolve the actual type of column x; the result is cached.
    protected abstract ColumnType getColumnTypeInternal(int x);
}
...@@ -14,39 +14,59 @@ ...@@ -14,39 +14,59 @@
* limitations under the License. * limitations under the License.
*/ */
package com.nfsdb.journal; package com.nfsdb.journal.lang.cst;
import com.nfsdb.journal.collections.DirectCompositeKeyIntMap;
import com.nfsdb.journal.column.ColumnType; import com.nfsdb.journal.column.ColumnType;
import com.nfsdb.journal.utils.Rnd;
import org.junit.Assert; import java.io.InputStream;
import org.junit.Test; import java.io.OutputStream;
public class DirectMapTest { public interface DataRow {
@Test int getColumnIndex(String column);
public void testCompositeKeyMap() throws Exception {
DirectCompositeKeyIntMap map = new DirectCompositeKeyIntMap(new ColumnType[]{ColumnType.LONG, ColumnType.STRING}, new ColumnType[]{ColumnType.DOUBLE, ColumnType.DOUBLE}); byte get(String column);
Rnd rnd = new Rnd();
byte get(int col);
for (int i = 0; i < 10000; i++) {
map.put( int getInt(String column);
map.withKey()
.putLong(rnd.nextLong()) int getInt(int col);
.putStr(rnd.nextString(10))
.$() long getLong(String column);
, i
); long getLong(int col);
}
long getDate(int col);
int count = 0; double getDouble(String column);
for (DirectCompositeKeyIntMap.Entry e : map) {
count++; double getDouble(int col);
e.key.getLong(0); CharSequence getStr(String column);
e.key.getStr(1);
} CharSequence getStr(int col);
Assert.assertEquals(10000, count); String getSym(String column);
}
String getSym(int col);
boolean getBool(String column);
boolean getBool(int col);
void getBin(int col, OutputStream s);
void getBin(String column, OutputStream s);
short getShort(int col);
InputStream getBin(String column);
InputStream getBin(int col);
DataRow getSlave();
int getColumnCount();
ColumnType getColumnType(int column);
} }
...@@ -17,100 +17,92 @@ ...@@ -17,100 +17,92 @@
package com.nfsdb.journal.lang.cst; package com.nfsdb.journal.lang.cst;
import com.nfsdb.journal.Partition; import com.nfsdb.journal.Partition;
import com.nfsdb.journal.column.ColumnType;
import com.nfsdb.journal.column.FixedColumn; import com.nfsdb.journal.column.FixedColumn;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
public class JournalEntry { public class JournalEntry extends AbstractDataRow {
public Partition<Object> partition; public Partition<Object> partition;
public long rowid; public long rowid;
public JournalEntry slave; public JournalEntry slave;
@Override
public int getColumnIndex(String column) { public int getColumnIndex(String column) {
return partition.getJournal().getMetadata().getColumnIndex(column); return partition.getJournal().getMetadata().getColumnIndex(column);
} }
public byte get(String column) { @Override
return get(getColumnIndex(column));
}
public byte get(int col) { public byte get(int col) {
return ((FixedColumn) partition.getAbstractColumn(col)).getByte(rowid); return ((FixedColumn) partition.getAbstractColumn(col)).getByte(rowid);
} }
public int getInt(String column) { @Override
return getInt(getColumnIndex(column));
}
public int getInt(int col) { public int getInt(int col) {
return partition.getInt(rowid, col); return partition.getInt(rowid, col);
} }
public long getLong(String column) { @Override
return getLong(getColumnIndex(column));
}
public long getLong(int col) { public long getLong(int col) {
return partition.getLong(rowid, col); return partition.getLong(rowid, col);
} }
@Override
public long getDate(int col) { public long getDate(int col) {
return partition.getLong(rowid, col); return partition.getLong(rowid, col);
} }
public double getDouble(String column) { @Override
return getDouble(getColumnIndex(column));
}
public double getDouble(int col) { public double getDouble(int col) {
return partition.getDouble(rowid, col); return partition.getDouble(rowid, col);
} }
public String getStr(String column) { @Override
return getStr(getColumnIndex(column));
}
public String getStr(int col) { public String getStr(int col) {
return partition.getStr(rowid, col); return partition.getStr(rowid, col);
} }
public String getSym(String column) { @Override
return getSym(getColumnIndex(column));
}
public String getSym(int col) { public String getSym(int col) {
return partition.getSym(rowid, col); return partition.getSym(rowid, col);
} }
public boolean getBool(String column) { @Override
return getBool(getColumnIndex(column));
}
public boolean getBool(int col) { public boolean getBool(int col) {
return partition.getBoolean(rowid, col); return partition.getBoolean(rowid, col);
} }
@Override
public void getBin(int col, OutputStream s) { public void getBin(int col, OutputStream s) {
partition.getBin(rowid, col, s); partition.getBin(rowid, col, s);
} }
public void getBin(String column, OutputStream s) { @Override
getBin(getColumnIndex(column), s);
}
public short getShort(int col) { public short getShort(int col) {
return partition.getShort(rowid, col); return partition.getShort(rowid, col);
} }
public InputStream getBin(String column) { @Override
return getBin(getColumnIndex(column));
}
public InputStream getBin(int col) { public InputStream getBin(int col) {
return partition.getBin(rowid, col); return partition.getBin(rowid, col);
} }
@Override
public DataRow getSlave() {
return slave;
}
@Override
public int getColumnCount() {
return partition.getJournal().getMetadata().getColumnCount();
}
@Override
public ColumnType getColumnTypeInternal(int column) {
return partition.getJournal().getMetadata().getColumnMetadata(column).type;
}
@Override @Override
public String toString() { public String toString() {
return "DataItem{" + return "DataItem{" +
......
/* /*
* Copyright (c) 2014-2015. Vlad Ilyushchenko * Copyright (c) 2014. Vlad Ilyushchenko
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -346,6 +346,10 @@ final public class Dates { ...@@ -346,6 +346,10 @@ final public class Dates {
return millis - millis % HOUR_MILLIS; return millis - millis % HOUR_MILLIS;
} }
public static long floorMI(long millis) {
return millis - millis % MINUTE_MILLIS;
}
public static long ceilYYYY(long millis) { public static long ceilYYYY(long millis) {
int y; int y;
boolean l; boolean l;
......
...@@ -39,9 +39,10 @@ public class Hash { ...@@ -39,9 +39,10 @@ public class Hash {
public static int hashXX(long address, int len, int seed) { public static int hashXX(long address, int len, int seed) {
int i32; int i32;
long p = address; long p = address;
long l = address + len;
if (len >= 16) { if (len >= 16) {
int limit = len - 16; long limit = l - 16;
int v1 = seed + PRIME32_1 + PRIME32_2; int v1 = seed + PRIME32_1 + PRIME32_2;
int v2 = seed + PRIME32_2; int v2 = seed + PRIME32_2;
int v3 = seed; int v3 = seed;
...@@ -52,17 +53,18 @@ public class Hash { ...@@ -52,17 +53,18 @@ public class Hash {
v1 = rotl(v1, 13); v1 = rotl(v1, 13);
v1 *= PRIME32_1; v1 *= PRIME32_1;
p += 4; p += 4;
v2 += Unsafe.getUnsafe().getInt(p) * PRIME32_2; v2 += Unsafe.getUnsafe().getInt(p) * PRIME32_2;
v2 = rotl(v2, 13); v2 = rotl(v2, 13);
v2 *= PRIME32_1; v2 *= PRIME32_1;
p += 4; p += 4;
v3 += Unsafe.getUnsafe().getInt(p) * PRIME32_2; v3 += Unsafe.getUnsafe().getInt(p) * PRIME32_2;
v3 = rotl(v3, 13); v3 = rotl(v3, 13);
v3 *= PRIME32_1; v3 *= PRIME32_1;
p += 4; p += 4;
int i = Unsafe.getUnsafe().getInt(p); v4 += Unsafe.getUnsafe().getInt(p) * PRIME32_2;
v4 += i * PRIME32_2;
v4 = rotl(v4, 13); v4 = rotl(v4, 13);
v4 *= PRIME32_1; v4 *= PRIME32_1;
p += 4; p += 4;
...@@ -76,13 +78,13 @@ public class Hash { ...@@ -76,13 +78,13 @@ public class Hash {
i32 += len; i32 += len;
while (p + 4 <= len) { while (p + 4 <= l) {
i32 += Unsafe.getUnsafe().getInt(p) * PRIME32_3; i32 += Unsafe.getUnsafe().getInt(p) * PRIME32_3;
i32 = rotl(i32, 17) * PRIME32_4; i32 = rotl(i32, 17) * PRIME32_4;
p += 4; p += 4;
} }
while (p < len) { while (p < l) {
i32 += Unsafe.getUnsafe().getByte(p) * PRIME32_5; i32 += Unsafe.getUnsafe().getByte(p) * PRIME32_5;
i32 = rotl(i32, 11) * PRIME32_1; i32 = rotl(i32, 11) * PRIME32_1;
p++; p++;
......
/*
* Copyright (c) 2014. Vlad Ilyushchenko
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nfsdb.journal;
import com.nfsdb.journal.collections.MultiMap;
import com.nfsdb.journal.column.ColumnType;
import com.nfsdb.journal.export.CharSink;
import com.nfsdb.journal.export.JournalEntryPrinter;
import com.nfsdb.journal.export.StringSink;
import com.nfsdb.journal.factory.configuration.ColumnMetadata;
import com.nfsdb.journal.lang.cst.JournalEntry;
import com.nfsdb.journal.model.Quote;
import com.nfsdb.journal.test.tools.AbstractTest;
import com.nfsdb.journal.test.tools.TestUtils;
import com.nfsdb.journal.utils.Dates;
import org.junit.Assert;
import org.junit.Test;
public class MultiMapTest extends AbstractTest {

    // Collects printed map contents for comparison against the expected dump.
    private final CharSink sink = new StringSink();

    /**
     * Groups ~10k generated quotes by (minute-floored timestamp, symbol) and
     * counts entries per bucket, verifying the MultiMap key/value round-trip
     * and its iterator output against a known-good dump.
     */
    @Test
    public void testCount() throws Exception {
        final String expected = "186\t2014-12-30T03:08:00.000Z\tAGK.L\t\n" +
                "185\t2014-12-30T03:08:00.000Z\tBP.L\t\n" +
                "216\t2014-12-30T03:08:00.000Z\tRRS.L\t\n" +
                "196\t2014-12-30T03:08:00.000Z\tBT-A.L\t\n" +
                "214\t2014-12-30T03:08:00.000Z\tGKN.L\t\n" +
                "184\t2014-12-30T03:08:00.000Z\tLLOY.L\t\n" +
                "187\t2014-12-30T03:08:00.000Z\tABF.L\t\n" +
                "196\t2014-12-30T03:08:00.000Z\tWTB.L\t\n" +
                "193\t2014-12-30T03:08:00.000Z\tTLW.L\t\n" +
                "192\t2014-12-30T03:08:00.000Z\tADM.L\t\n" +
                "189\t2014-12-30T03:09:00.000Z\tBP.L\t\n" +
                "203\t2014-12-30T03:09:00.000Z\tGKN.L\t\n" +
                "201\t2014-12-30T03:09:00.000Z\tADM.L\t\n" +
                "187\t2014-12-30T03:09:00.000Z\tTLW.L\t\n" +
                "168\t2014-12-30T03:09:00.000Z\tRRS.L\t\n" +
                "213\t2014-12-30T03:09:00.000Z\tAGK.L\t\n" +
                "214\t2014-12-30T03:09:00.000Z\tBT-A.L\t\n" +
                "204\t2014-12-30T03:09:00.000Z\tLLOY.L\t\n" +
                "214\t2014-12-30T03:09:00.000Z\tWTB.L\t\n" +
                "207\t2014-12-30T03:09:00.000Z\tABF.L\t\n" +
                "185\t2014-12-30T03:10:00.000Z\tBP.L\t\n" +
                "221\t2014-12-30T03:10:00.000Z\tWTB.L\t\n" +
                "206\t2014-12-30T03:10:00.000Z\tLLOY.L\t\n" +
                "215\t2014-12-30T03:10:00.000Z\tBT-A.L\t\n" +
                "195\t2014-12-30T03:10:00.000Z\tTLW.L\t\n" +
                "203\t2014-12-30T03:10:00.000Z\tADM.L\t\n" +
                "220\t2014-12-30T03:10:00.000Z\tAGK.L\t\n" +
                "194\t2014-12-30T03:10:00.000Z\tGKN.L\t\n" +
                "172\t2014-12-30T03:10:00.000Z\tRRS.L\t\n" +
                "189\t2014-12-30T03:10:00.000Z\tABF.L\t\n" +
                "213\t2014-12-30T03:11:00.000Z\tADM.L\t\n" +
                "195\t2014-12-30T03:11:00.000Z\tLLOY.L\t\n" +
                "185\t2014-12-30T03:11:00.000Z\tBP.L\t\n" +
                "198\t2014-12-30T03:11:00.000Z\tBT-A.L\t\n" +
                "210\t2014-12-30T03:11:00.000Z\tRRS.L\t\n" +
                "213\t2014-12-30T03:11:00.000Z\tGKN.L\t\n" +
                "194\t2014-12-30T03:11:00.000Z\tAGK.L\t\n" +
                "220\t2014-12-30T03:11:00.000Z\tWTB.L\t\n" +
                "190\t2014-12-30T03:11:00.000Z\tABF.L\t\n" +
                "182\t2014-12-30T03:11:00.000Z\tTLW.L\t\n" +
                "212\t2014-12-30T03:12:00.000Z\tABF.L\t\n" +
                "214\t2014-12-30T03:12:00.000Z\tAGK.L\t\n" +
                "186\t2014-12-30T03:12:00.000Z\tTLW.L\t\n" +
                "231\t2014-12-30T03:12:00.000Z\tBP.L\t\n" +
                "191\t2014-12-30T03:12:00.000Z\tLLOY.L\t\n" +
                "209\t2014-12-30T03:12:00.000Z\tRRS.L\t\n" +
                "196\t2014-12-30T03:12:00.000Z\tGKN.L\t\n" +
                "191\t2014-12-30T03:12:00.000Z\tADM.L\t\n" +
                "186\t2014-12-30T03:12:00.000Z\tBT-A.L\t\n" +
                "184\t2014-12-30T03:12:00.000Z\tWTB.L\t\n" +
                "4\t2014-12-30T03:13:00.000Z\tBP.L\t\n" +
                "6\t2014-12-30T03:13:00.000Z\tGKN.L\t\n" +
                "7\t2014-12-30T03:13:00.000Z\tTLW.L\t\n" +
                "7\t2014-12-30T03:13:00.000Z\tABF.L\t\n" +
                "6\t2014-12-30T03:13:00.000Z\tADM.L\t\n" +
                "3\t2014-12-30T03:13:00.000Z\tWTB.L\t\n" +
                "5\t2014-12-30T03:13:00.000Z\tRRS.L\t\n" +
                "6\t2014-12-30T03:13:00.000Z\tLLOY.L\t\n" +
                "4\t2014-12-30T03:13:00.000Z\tAGK.L\t\n" +
                "3\t2014-12-30T03:13:00.000Z\tBT-A.L\t\n";

        JournalWriter<Quote> w = factory.writer(Quote.class);
        TestUtils.generateQuoteData(w, 10000, 1419908881558L, 30);

        int tsIndex = w.getMetadata().getColumnIndex("timestamp");
        int symIndex = w.getMetadata().getColumnIndex("sym");

        // Composite key = (minute bucket, symbol); single INT value = count.
        MultiMap map = new MultiMap.Builder()
                .keyColumn(w.getMetadata().getColumnMetadata(tsIndex))
                .keyColumn(w.getMetadata().getColumnMetadata(symIndex))
                .valueColumn(new ColumnMetadata() {{
                    name = "count";
                    type = ColumnType.INT;
                }})
                .setCapacity(150)
                .setDataSize(1024 * 1024)
                .setLoadFactor(0.5f)
                .build();

        try {
            for (JournalEntry e : w.rows()) {
                long ts = e.getLong(tsIndex);
                MultiMap.Values val = map.claimSlot(
                        map.claimKey()
                                .putLong(Dates.floorMI(ts))
                                .putStr(e.getSym(symIndex))
                                .$()
                );
                // First sighting of this (minute, symbol) starts at 1; otherwise increment.
                val.putInt(0, val.isNew() ? 1 : val.getInt(0) + 1);
            }

            JournalEntryPrinter out = new JournalEntryPrinter(sink, true);
            out.print(map.iterator());
        } finally {
            // MultiMap holds off-heap memory — release it even if the
            // population loop or the printer throws, so a failing test
            // does not leak direct memory.
            map.free();
        }

        Assert.assertEquals(expected, sink.toString());
    }
}
/* /*
* Copyright (c) 2014-2015. Vlad Ilyushchenko * Copyright (c) 2014. Vlad Ilyushchenko
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -80,7 +80,7 @@ public class JoinSymbolOnSymbolTest { ...@@ -80,7 +80,7 @@ public class JoinSymbolOnSymbolTest {
throw new JournalRuntimeException(e); throw new JournalRuntimeException(e);
} }
out = new JournalEntryPrinter(new FlexBufferSink(new FileOutputStream(FileDescriptor.out).getChannel()), true); out = new JournalEntryPrinter(new FlexBufferSink(new FileOutputStream(FileDescriptor.out).getChannel()), false);
} }
@Before @Before
......
/* /*
* Copyright (c) 2014-2015. Vlad Ilyushchenko * Copyright (c) 2014. Vlad Ilyushchenko
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -46,7 +46,6 @@ import org.junit.BeforeClass; ...@@ -46,7 +46,6 @@ import org.junit.BeforeClass;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test; import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
public class SingleJournalSearchTest { public class SingleJournalSearchTest {
...@@ -176,7 +175,7 @@ public class SingleJournalSearchTest { ...@@ -176,7 +175,7 @@ public class SingleJournalSearchTest {
} }
private void assertEquals(CharSequence expected, EntrySource src) throws IOException { private void assertEquals(CharSequence expected, EntrySource src) {
JournalEntryPrinter p = new JournalEntryPrinter(sink, true); JournalEntryPrinter p = new JournalEntryPrinter(sink, true);
p.print(src); p.print(src);
Assert.assertEquals(expected, sink.toString()); Assert.assertEquals(expected, sink.toString());
......
...@@ -19,7 +19,6 @@ package com.nfsdb.journal.lang.experimental; ...@@ -19,7 +19,6 @@ package com.nfsdb.journal.lang.experimental;
import com.nfsdb.journal.Journal; import com.nfsdb.journal.Journal;
import com.nfsdb.journal.JournalWriter; import com.nfsdb.journal.JournalWriter;
import com.nfsdb.journal.Partition; import com.nfsdb.journal.Partition;
import com.nfsdb.journal.collections.DirectCompositeKeyIntMap;
import com.nfsdb.journal.column.FixedColumn; import com.nfsdb.journal.column.FixedColumn;
import com.nfsdb.journal.column.SymbolTable; import com.nfsdb.journal.column.SymbolTable;
import com.nfsdb.journal.factory.configuration.JournalConfigurationBuilder; import com.nfsdb.journal.factory.configuration.JournalConfigurationBuilder;
...@@ -47,7 +46,6 @@ import com.nfsdb.journal.test.tools.TestUtils; ...@@ -47,7 +46,6 @@ import com.nfsdb.journal.test.tools.TestUtils;
import com.nfsdb.journal.utils.Files; import com.nfsdb.journal.utils.Files;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import java.util.Arrays; import java.util.Arrays;
...@@ -224,34 +222,4 @@ public class CstTest { ...@@ -224,34 +222,4 @@ public class CstTest {
System.out.println(count); System.out.println(count);
System.out.println((System.nanoTime() - t) / 20); System.out.println((System.nanoTime() - t) / 20);
} }
@Test
@Ignore
public void testResample() throws Exception {
JournalWriter<Quote> w = factory.writer(Quote.class);
TestUtils.generateQuoteData(w, 100000, System.currentTimeMillis(), 2);
DirectCompositeKeyIntMap map = new DirectCompositeKeyIntMap(null, null);
int tsIndex = w.getMetadata().getColumnIndex("timestamp");
int symIndex = w.getMetadata().getColumnIndex("sym");
for (JournalEntry e : w.rows()) {
long ts = e.getLong(tsIndex);
map.put(
map.withKey()
.putStr(e.getSym(symIndex))
.putLong(ts - (ts % 60000L))
.$()
, 1
);
}
for (DirectCompositeKeyIntMap.Entry e : map) {
System.out.println(e.key.getStr(1) + "\t" + e.key.getLong(0) + "\t" + e.value);
}
map.free();
}
} }
...@@ -37,10 +37,6 @@ public class HashTest { ...@@ -37,10 +37,6 @@ public class HashTest {
rnd.nextChars(address, LEN); rnd.nextChars(address, LEN);
hashes.add(Hash.hashXX(address, LEN, rnd.nextInt())); hashes.add(Hash.hashXX(address, LEN, rnd.nextInt()));
} }
System.out.println(hashes.size());
Assert.assertTrue("Hash function distribution dropped", hashes.size() > 99990); Assert.assertTrue("Hash function distribution dropped", hashes.size() > 99990);
} }
} }
/* /*
* Copyright (c) 2014-2015. Vlad Ilyushchenko * Copyright (c) 2014. Vlad Ilyushchenko
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -70,7 +70,7 @@ public final class TestUtils { ...@@ -70,7 +70,7 @@ public final class TestUtils {
public static void generateQuoteData(JournalWriter<Quote> w, int count, long timestamp, long increment) throws JournalException { public static void generateQuoteData(JournalWriter<Quote> w, int count, long timestamp, long increment) throws JournalException {
String symbols[] = {"AGK.L", "BP.L", "TLW.L", "ABF.L", "LLOY.L", "BT-A.L", "WTB.L", "RRS.L", "ADM.L", "GKN.L", "HSBA.L"}; String symbols[] = {"AGK.L", "BP.L", "TLW.L", "ABF.L", "LLOY.L", "BT-A.L", "WTB.L", "RRS.L", "ADM.L", "GKN.L", "HSBA.L"};
Quote q = new Quote(); Quote q = new Quote();
Rnd r = new Rnd(System.currentTimeMillis(), System.currentTimeMillis()); Rnd r = new Rnd();
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
q.clear(); q.clear();
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册