提交 f97bbaf7 编写于 作者: S Serge Rider

Merge remote-tracking branch 'origin/devel' into devel

......@@ -34,7 +34,6 @@ import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.edit.DBEPersistAction;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBCSession;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
......@@ -164,7 +163,7 @@ public class DatabaseConsumerPageMapping extends ActiveWizardPage<DataTransferWi
autoAssignButton = UIUtils.createDialogButton(buttonsPanel,
DTMessages.data_transfer_db_consumer_auto_assign,
UIIcon.ASTERISK,
"Auto-assign table and column mappings",
DTMessages.data_transfer_db_consumer_auto_assign_description,
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e)
......@@ -178,7 +177,7 @@ public class DatabaseConsumerPageMapping extends ActiveWizardPage<DataTransferWi
final Button mapTableButton = UIUtils.createDialogButton(buttonsPanel,
DTMessages.data_transfer_db_consumer_existing_table,
DBIcon.TREE_TABLE,
"Select target table",
DTMessages.data_transfer_db_consumer_existing_table_description,
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e)
......@@ -191,7 +190,7 @@ public class DatabaseConsumerPageMapping extends ActiveWizardPage<DataTransferWi
final Button createNewButton = UIUtils.createDialogButton(buttonsPanel,
DTMessages.data_transfer_db_consumer_new_table,
DBIcon.TREE_VIEW,
"Set target table name",
DTMessages.data_transfer_db_consumer_new_table_description,
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e)
......@@ -204,7 +203,7 @@ public class DatabaseConsumerPageMapping extends ActiveWizardPage<DataTransferWi
final Button columnsButton = UIUtils.createDialogButton(buttonsPanel,
DTMessages.data_transfer_db_consumer_column_mappings,
DBIcon.TREE_COLUMNS,
"Configure column mappings (advanced)",
DTMessages.data_transfer_db_consumer_column_mappings_description,
new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e)
......
......@@ -24,8 +24,10 @@ import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.tools.transfer.stream.StreamDataImporterColumnInfo;
import org.jkiss.utils.CommonUtils;
import java.util.List;
import java.util.Map;
/**
......@@ -123,8 +125,16 @@ public class DatabaseMappingAttribute implements DatabaseMappingObject {
targetName = source.getName();
}
DBSEntity targetEntity = (DBSEntity) parent.getTarget();
this.target = DBUtils.findObject(
targetEntity.getAttributes(monitor), DBUtils.getUnQuotedIdentifier(targetEntity.getDataSource(), targetName), true);
List<? extends DBSEntityAttribute> targetAttributes = targetEntity.getAttributes(monitor);
if (source instanceof StreamDataImporterColumnInfo && targetAttributes != null && source.getOrdinalPosition() < targetAttributes.size()) {
StreamDataImporterColumnInfo source = (StreamDataImporterColumnInfo) this.source;
DBSEntityAttribute targetAttribute = targetAttributes.get(source.getOrdinalPosition());
source.setDataKind(targetAttribute.getDataKind());
if (!source.isMappingMetadataPresent()) {
targetName = targetAttribute.getName();
}
}
this.target = DBUtils.findObject(targetAttributes, DBUtils.getUnQuotedIdentifier(targetEntity.getDataSource(), targetName), true);
if (this.target != null) {
mappingType = DatabaseMappingType.existing;
} else {
......
......@@ -97,9 +97,13 @@ public class DTMessages extends NLS {
public static String data_transfer_db_consumer_target_container;
public static String data_transfer_db_consumer_choose_container;
public static String data_transfer_db_consumer_auto_assign;
public static String data_transfer_db_consumer_auto_assign_description;
public static String data_transfer_db_consumer_existing_table;
public static String data_transfer_db_consumer_existing_table_description;
public static String data_transfer_db_consumer_new_table;
public static String data_transfer_db_consumer_new_table_description;
public static String data_transfer_db_consumer_column_mappings;
public static String data_transfer_db_consumer_column_mappings_description;
public static String data_transfer_db_consumer_ddl;
public static String sql_script_task_title;
......
......@@ -74,9 +74,13 @@ data_transfer_wizard_settings_column_mapping_type = Mapping type
data_transfer_db_consumer_target_container = Target container
data_transfer_db_consumer_choose_container = Choose container
data_transfer_db_consumer_auto_assign = Auto assign
data_transfer_db_consumer_auto_assign_description = Auto-assign table and column mappings
data_transfer_db_consumer_existing_table = Browse ...
data_transfer_db_consumer_existing_table_description = Select target table
data_transfer_db_consumer_new_table = New ...
data_transfer_db_consumer_new_table_description = Set target table name
data_transfer_db_consumer_column_mappings = Columns ...
data_transfer_db_consumer_column_mappings_description = Configure column mappings (advanced)
data_transfer_db_consumer_ddl = Schema ...
sql_script_task_title = SQL Script execute
......
......@@ -81,10 +81,14 @@ sql_script_task_page_settings_description = \u0412\u044B\u0431\u0435\u0440\u0438
sql_script_task_page_settings_title = \u041D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0432\u044B\u043F\u043E\u043B\u043D\u0435\u043D\u0438\u044F SQL \u0441\u043A\u0440\u0438\u043F\u0442\u0430
sql_script_task_title = \u0412\u044B\u043F\u043E\u043B\u043D\u0435\u043D\u0438\u0435 SQL \u0441\u043A\u0440\u0438\u043F\u0442\u0430
data_transfer_db_consumer_ddl = \u0421\u0445\u0435\u043C\u0430 ...
data_transfer_db_consumer_column_mappings = \u0421\u0442\u043E\u043B\u0431\u0446\u044B ...
data_transfer_db_consumer_new_table = \u041D\u043E\u0432\u043E\u0435 ...
data_transfer_db_consumer_auto_assign = \u0410\u0432\u0442\u043E\u043F\u0440\u0438\u0441\u0432\u043E\u0435\u043D\u0438\u0435
data_transfer_db_consumer_auto_assign_description = \u0410\u0432\u0442\u043E\u043F\u0440\u0438\u0441\u0432\u043E\u0435\u043D\u0438\u0435 \u0441\u043E\u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0438\u0439 \u0442\u0430\u0431\u043B\u0438\u0446 \u0438 \u0441\u0442\u043E\u043B\u0431\u0446\u043E\u0432
data_transfer_db_consumer_existing_table = \u041E\u0431\u0437\u043E\u0440 ...
data_transfer_db_consumer_auto_assign = \u0410\u0432\u0442\u043E-\u043F\u0440\u0438\u0441\u0432\u043E\u0435\u043D\u0438\u0435
data_transfer_db_consumer_existing_table_description = \u0412\u044B\u0431\u0440\u0430\u0442\u044C \u0446\u0435\u043B\u0435\u0432\u0443\u044E \u0442\u0430\u0431\u043B\u0438\u0446\u0443
data_transfer_db_consumer_new_table = \u0421\u043E\u0437\u0434\u0430\u0442\u044C ...
data_transfer_db_consumer_new_table_description = \u0421\u043E\u0437\u0434\u0430\u0442\u044C \u043D\u043E\u0432\u0443\u044E \u0446\u0435\u043B\u0435\u0432\u0443\u044E \u0442\u0430\u0431\u043B\u0438\u0446\u0443
data_transfer_db_consumer_column_mappings = \u0421\u0442\u043E\u043B\u0431\u0446\u044B ...
data_transfer_db_consumer_column_mappings_description = \u041D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0430 \u0441\u043E\u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0438\u0439 \u0441\u0442\u043E\u043B\u0431\u0446\u043E\u0432
data_transfer_db_consumer_choose_container = \u0412\u044B\u0431\u043E\u0440 \u043A\u043E\u043D\u0442\u0435\u0439\u043D\u0435\u0440\u0430
database_consumer_settings_option_use_transactions = \u0418\u0441\u043F\u043E\u043B\u044C\u0437\u043E\u0432\u0430\u0442\u044C \u0442\u0440\u0430\u043D\u0437\u0430\u043A\u0446\u0438\u0438
database_consumer_settings_option_commit_after = \u0412\u044B\u043F\u043E\u043B\u043D\u044F\u0442\u044C Commit \u043F\u043E\u0441\u043B\u0435 \u0432\u0441\u0442\u0430\u0432\u043A\u0438 \u0441\u0442\u0440\u043E\u043A
......
......@@ -29,6 +29,10 @@ public class StreamDataImporterColumnInfo extends AbstractAttribute implements D
private StreamEntityMapping entityMapping;
private DBPDataKind dataKind;
// Determines whether the mapping metadata,
// such as the column name, is present or not.
private boolean mappingMetadataPresent;
public StreamDataImporterColumnInfo(StreamEntityMapping entity, int columnIndex, String columnName, String typeName, int maxLength, DBPDataKind dataKind) {
super(columnName, typeName, -1, columnIndex, maxLength, null, null, false, false);
this.entityMapping = entity;
......@@ -61,4 +65,12 @@ public class StreamDataImporterColumnInfo extends AbstractAttribute implements D
public void setDataKind(DBPDataKind dataKind) {
this.dataKind = dataKind;
}
// Reports whether mapping metadata (e.g. a real column name taken from a header
// row) was present in the imported source, as opposed to synthetically generated.
public boolean isMappingMetadataPresent() {
return mappingMetadataPresent;
}
// Marks whether mapping metadata (such as the column name) came from the source
// itself; importers set this when a header row supplies real column names.
public void setMappingMetadataPresent(boolean mappingMetadataPresent) {
this.mappingMetadataPresent = mappingMetadataPresent;
}
}
......@@ -18,6 +18,7 @@
package org.jkiss.dbeaver.tools.transfer.stream;
import org.jkiss.dbeaver.Log;
import org.jkiss.utils.CommonUtils;
import java.util.Map;
......@@ -28,18 +29,17 @@ public class StreamTransferUtils {
private static final Log log = Log.getLog(StreamTransferUtils.class);
private static final char DEF_DELIMITER = ',';
private static final String DEF_DELIMITER = ",";
public static String getDelimiterString(Map<String, Object> properties, String propName) {
String delimString = String.valueOf(properties.get(propName));
if (delimString == null || delimString.isEmpty()) {
delimString = String.valueOf(DEF_DELIMITER);
String delimString = CommonUtils.toString(properties.get(propName), null);
if (CommonUtils.isEmpty(delimString)) {
return DEF_DELIMITER;
} else {
delimString = delimString
return delimString
.replace("\\t", "\t")
.replace("\\n", "\n")
.replace("\\r", "\r");
}
return delimString;
}
}
......@@ -19,6 +19,7 @@ package org.jkiss.dbeaver.tools.transfer.stream.importer;
import au.com.bytecode.opencsv.CSVReader;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPDataKind;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBUtils;
......@@ -31,6 +32,7 @@ import org.jkiss.dbeaver.tools.transfer.IDataTransferConsumer;
import org.jkiss.dbeaver.tools.transfer.stream.*;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.Pair;
import java.io.*;
import java.util.ArrayList;
......@@ -41,6 +43,7 @@ import java.util.Map;
* CSV importer
*/
public class DataImporterCSV extends StreamImporterAbstract {
private static final Log log = Log.getLog(DataImporterCSV.class);
private static final String PROP_ENCODING = "encoding";
private static final String PROP_HEADER = "header";
......@@ -49,8 +52,16 @@ public class DataImporterCSV extends StreamImporterAbstract {
private static final String PROP_NULL_STRING = "nullString";
private static final String PROP_EMPTY_STRING_NULL = "emptyStringNull";
private static final String PROP_ESCAPE_CHAR = "escapeChar";
private static final int MAX_COLUMN_LENGTH = 1024;
enum HeaderPosition {
private static final int MAX_DATA_TYPE_SAMPLES = 1000;
private static final Pair<DBPDataKind, String> DATA_TYPE_UNKNOWN = new Pair<>(DBPDataKind.UNKNOWN, null);
private static final Pair<DBPDataKind, String> DATA_TYPE_INTEGER = new Pair<>(DBPDataKind.NUMERIC, "INTEGER");
private static final Pair<DBPDataKind, String> DATA_TYPE_REAL = new Pair<>(DBPDataKind.NUMERIC, "REAL");
private static final Pair<DBPDataKind, String> DATA_TYPE_BOOLEAN = new Pair<>(DBPDataKind.BOOLEAN, "BOOLEAN");
private static final Pair<DBPDataKind, String> DATA_TYPE_STRING = new Pair<>(DBPDataKind.STRING, "VARCHAR");
// Where the header row (the row carrying column names) sits in the CSV input.
public enum HeaderPosition {
// No header row: column names are generated synthetically ("Column1", ...).
none,
// The first row of the file holds the column names.
top,
}
......@@ -67,26 +78,62 @@ public class DataImporterCSV extends StreamImporterAbstract {
try (Reader reader = openStreamReader(inputStream, processorProperties)) {
try (CSVReader csvReader = openCSVReader(reader, processorProperties)) {
for (;;) {
String[] line = csvReader.readNext();
if (line == null) {
break;
String[] header = getNextLine(csvReader);
if (header == null) {
return columnsInfo;
}
for (int i = 0; i < header.length; i++) {
String column = header[i];
if (headerPosition == HeaderPosition.none) {
column = "Column" + (i + 1);
} else {
column = DBUtils.getUnQuotedIdentifier(entityMapping.getDataSource(), column);
}
if (line.length == 0) {
continue;
StreamDataImporterColumnInfo columnInfo = new StreamDataImporterColumnInfo(entityMapping, i, column, null, MAX_COLUMN_LENGTH, DBPDataKind.UNKNOWN);
columnInfo.setMappingMetadataPresent(headerPosition != HeaderPosition.none);
columnsInfo.add(columnInfo);
}
for (int sample = 0; sample < MAX_DATA_TYPE_SAMPLES; sample++) {
String[] line;
if (sample == 0 && headerPosition == HeaderPosition.none) {
// Include first line (header that does not exist) for sampling
line = header;
} else {
line = getNextLine(csvReader);
if (line == null) {
break;
}
}
for (int i = 0; i < line.length; i++) {
String column = line[i];
if (headerPosition == HeaderPosition.none) {
column = "Column" + (i + 1);
} else {
column = DBUtils.getUnQuotedIdentifier(entityMapping.getDataSource(), column);
for (int i = 0; i < Math.min(line.length, header.length); i++) {
Pair<DBPDataKind, String> dataType = getDataType(line[i]);
StreamDataImporterColumnInfo columnInfo = columnsInfo.get(i);
switch (dataType.getFirst()) {
case STRING:
columnInfo.setDataKind(dataType.getFirst());
columnInfo.setTypeName(dataType.getSecond());
break;
case NUMERIC:
case BOOLEAN:
if (columnInfo.getDataKind() == DBPDataKind.UNKNOWN) {
columnInfo.setDataKind(dataType.getFirst());
columnInfo.setTypeName(dataType.getSecond());
}
break;
}
columnsInfo.add(
new StreamDataImporterColumnInfo(
entityMapping, i, column, "VARCHAR", 1024, DBPDataKind.STRING));
}
break;
}
for (StreamDataImporterColumnInfo columnInfo : columnsInfo) {
if (columnInfo.getDataKind() == DBPDataKind.UNKNOWN) {
log.warn("Cannot guess data type for column '" + columnInfo.getName() + "', defaulting to VARCHAR");
columnInfo.setDataKind(DBPDataKind.STRING);
columnInfo.setTypeName("VARCHAR");
}
}
}
} catch (IOException e) {
......@@ -118,6 +165,43 @@ public class DataImporterCSV extends StreamImporterAbstract {
return new InputStreamReader(inputStream, encoding);
}
/**
 * Reads rows from the CSV reader until a non-empty one is found.
 *
 * @param csvReader source of parsed CSV rows
 * @return the next row that has at least one field, or {@code null} at end of input
 * @throws IOException if the underlying reader fails
 */
private String[] getNextLine(CSVReader csvReader) throws IOException {
    String[] row = csvReader.readNext();
    // Skip completely empty rows (zero fields); stop at end of stream.
    while (row != null && row.length == 0) {
        row = csvReader.readNext();
    }
    return row;
}
/**
 * Guesses the database data type of a single CSV cell value.
 *
 * <p>Classification order is most-specific first: integer, then real number,
 * then boolean, falling back to string. Empty cells carry no type information
 * and yield {@code DATA_TYPE_UNKNOWN}.
 *
 * @param value raw cell text (may be {@code null} or empty)
 * @return a (data kind, type name) pair describing the guessed type
 */
private Pair<DBPDataKind, String> getDataType(String value) {
    if (CommonUtils.isEmpty(value)) {
        return DATA_TYPE_UNKNOWN;
    }
    try {
        Integer.parseInt(value);
        return DATA_TYPE_INTEGER;
    } catch (NumberFormatException ignored) {
        // Not an integer — fall through to the floating-point check.
    }
    try {
        Double.parseDouble(value);
        return DATA_TYPE_REAL;
    } catch (NumberFormatException ignored) {
        // Not numeric at all — fall through to the boolean check.
    }
    boolean isBoolean = "true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value);
    return isBoolean ? DATA_TYPE_BOOLEAN : DATA_TYPE_STRING;
}
@Override
public void runImport(@NotNull DBRProgressMonitor monitor, @NotNull DBPDataSource streamDataSource, @NotNull InputStream inputStream, @NotNull IDataTransferConsumer consumer) throws DBException {
IStreamDataImporterSite site = getSite();
......
......@@ -10,6 +10,7 @@ Require-Bundle: org.eclipse.core.runtime,
org.mockito.mockito-all,
org.jkiss.dbeaver.model,
org.jkiss.dbeaver.model.sql,
org.jkiss.dbeaver.data.transfer,
org.jkiss.dbeaver.registry,
org.jkiss.dbeaver.headless,
org.jkiss.dbeaver.ext.postgresql,
......
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2020 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.data.transfer;
import org.jkiss.dbeaver.tools.transfer.stream.StreamTransferUtils;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
public class StreamTransferUtilsTest {

    private static final String PROP = "delimiter";

    @Test
    public void testDelimiterString() {
        // Escaped control sequences in the property value are expanded to real characters.
        Assert.assertEquals(" \t\n\r", resolve(" \\t\\n\\r"));
        // Empty and null values fall back to the default comma delimiter.
        Assert.assertEquals(",", resolve(""));
        Assert.assertEquals(",", resolve(null));
    }

    // Builds a one-entry property map and resolves the delimiter through the utility.
    private static String resolve(Object value) {
        Map<String, Object> props = new HashMap<>();
        props.put(PROP, value);
        return StreamTransferUtils.getDelimiterString(props, PROP);
    }
}
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2020 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.tools.transfer;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.DBPDataKind;
import org.jkiss.dbeaver.tools.transfer.stream.IStreamDataImporterSite;
import org.jkiss.dbeaver.tools.transfer.stream.StreamDataImporterColumnInfo;
import org.jkiss.dbeaver.tools.transfer.stream.StreamEntityMapping;
import org.jkiss.dbeaver.tools.transfer.stream.importer.DataImporterCSV;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(MockitoJUnitRunner.class)
public class CSVImporterTest {
    // Placeholder file handle; the importer reads from the in-memory stream, not from disk.
    private static final File DUMMY_FILE = new File("dummy");

    private final DataImporterCSV importer = new DataImporterCSV();
    private final StreamEntityMapping mapping = new StreamEntityMapping(DUMMY_FILE);
    private final Map<String, Object> properties = new HashMap<>();

    @Mock
    private IStreamDataImporterSite site;

    @Before
    public void init() throws DBException {
        // NOTE(review): stubbing happens after init(site); assumes init() does not
        // read processor properties during initialization — confirm against DataImporterCSV.
        importer.init(site);
        Mockito.when(site.getProcessorProperties()).thenReturn(properties);
    }

    @Test
    public void generateColumnNames() throws DBException, IOException {
        // Without a header row, columns get synthetic 1-based names.
        List<StreamDataImporterColumnInfo> columnsInfo = readColumnsInfo("a,b,c,d", false);
        Assert.assertEquals(4, columnsInfo.size());
        Assert.assertEquals("Column1", columnsInfo.get(0).getName());
        Assert.assertEquals("Column2", columnsInfo.get(1).getName());
        Assert.assertEquals("Column3", columnsInfo.get(2).getName());
        Assert.assertEquals("Column4", columnsInfo.get(3).getName());
    }

    @Test
    public void readColumnNames() throws DBException, IOException {
        // With a header row, column names are taken from the first line.
        List<StreamDataImporterColumnInfo> columnsInfo = readColumnsInfo("a,b,c,d", true);
        Assert.assertEquals(4, columnsInfo.size());
        Assert.assertEquals("a", columnsInfo.get(0).getName());
        Assert.assertEquals("b", columnsInfo.get(1).getName());
        Assert.assertEquals("c", columnsInfo.get(2).getName());
        Assert.assertEquals("d", columnsInfo.get(3).getName());
    }

    @Test
    public void guessColumnTypes() throws DBException, IOException {
        // Each cell's textual form drives the guessed kind/type.
        List<StreamDataImporterColumnInfo> columnsInfo = readColumnsInfo("1,2.0,abc,false", false);
        Assert.assertEquals(4, columnsInfo.size());
        Assert.assertEquals(DBPDataKind.NUMERIC, columnsInfo.get(0).getDataKind());
        Assert.assertEquals("INTEGER", columnsInfo.get(0).getTypeName());
        Assert.assertEquals(DBPDataKind.NUMERIC, columnsInfo.get(1).getDataKind());
        Assert.assertEquals("REAL", columnsInfo.get(1).getTypeName());
        Assert.assertEquals(DBPDataKind.STRING, columnsInfo.get(2).getDataKind());
        Assert.assertEquals(DBPDataKind.BOOLEAN, columnsInfo.get(3).getDataKind());
    }

    @Test
    public void guessColumnTypesOverSamples() throws DBException, IOException {
        // A later STRING sample ("test") must override earlier numeric guesses.
        List<StreamDataImporterColumnInfo> columnsInfo = readColumnsInfo("1\n\n2\n3\ntest", false);
        Assert.assertEquals(1, columnsInfo.size());
        Assert.assertEquals(DBPDataKind.STRING, columnsInfo.get(0).getDataKind());
    }

    @Test
    public void guessColumnTypesDefault() throws DBException, IOException {
        // Empty cells give no type information; columns default to STRING.
        List<StreamDataImporterColumnInfo> columnsInfo = readColumnsInfo(",", false);
        Assert.assertEquals(2, columnsInfo.size());
        Assert.assertEquals(DBPDataKind.STRING, columnsInfo.get(0).getDataKind());
        Assert.assertEquals(DBPDataKind.STRING, columnsInfo.get(1).getDataKind());
    }

    /**
     * Feeds {@code data} to the importer as an in-memory CSV stream and returns
     * the inferred column metadata.
     *
     * @param data            raw CSV text
     * @param isHeaderPresent whether the first line carries column names
     */
    private List<StreamDataImporterColumnInfo> readColumnsInfo(String data, boolean isHeaderPresent) throws DBException, IOException {
        properties.put("header", isHeaderPresent ? DataImporterCSV.HeaderPosition.top : DataImporterCSV.HeaderPosition.none);
        // Fix: encode with an explicit charset instead of the platform default,
        // so the test behaves identically on every build machine.
        try (ByteArrayInputStream is = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8))) {
            return importer.readColumnsInfo(mapping, is);
        }
    }
}
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册