未验证 提交 55c8f9ba 编写于 作者: J Jesse Yang 提交者: GitHub

feat(explore): allow opening charts with missing dataset (#12705)

上级 29ad78e1
......@@ -99,7 +99,7 @@ describe('DatasourceControl', () => {
const wrapper = setup();
const alert = wrapper.find(Icon);
expect(alert.at(1).prop('name')).toBe('alert-solid');
const tooltip = wrapper.find(Tooltip).at(1);
const tooltip = wrapper.find(Tooltip).at(0);
expect(tooltip.prop('title')).toBe(
defaultProps.datasource.health_check_message,
);
......
......@@ -28,6 +28,7 @@ import { DropDownProps } from 'antd/lib/dropdown';
*/
// eslint-disable-next-line no-restricted-imports
export {
Alert,
AutoComplete,
Avatar,
Button,
......
......@@ -29,9 +29,9 @@ export type ControlProps = {
// signature to the original action factory.
actions: Partial<ExploreActions> & Pick<ExploreActions, 'setControlValue'>;
type: ControlType;
label: string;
label?: ReactNode;
name: string;
description?: string;
description?: ReactNode;
tooltipOnClick?: () => ReactNode;
places?: number;
rightNode?: ReactNode;
......
......@@ -17,57 +17,25 @@
* under the License.
*/
import React, { useEffect, useState } from 'react';
import { styled, t, QueryFormData } from '@superset-ui/core';
import { styled, t } from '@superset-ui/core';
import { Collapse } from 'src/common/components';
import {
ColumnOption,
MetricOption,
ControlType,
ControlConfig,
DatasourceMeta,
} from '@superset-ui/chart-controls';
import { debounce } from 'lodash';
import { matchSorter, rankings } from 'match-sorter';
import { ExploreActions } from '../actions/exploreActions';
import Control from './Control';
interface DatasourceControl {
validationErrors: Array<any>;
mapStateToProps: QueryFormData;
type: ControlType;
label: string;
datasource?: DatasourceControl;
interface DatasourceControl extends ControlConfig {
datasource?: DatasourceMeta;
}
type Columns = {
column_name: string;
description: string | undefined;
expression: string | undefined;
filterable: boolean;
groupby: string | undefined;
id: number;
is_dttm: boolean;
python_date_format: string;
type: string;
verbose_name: string;
};
type Metrics = {
certification_details: string | undefined;
certified_by: string | undefined;
d3format: string | undefined;
description: string | undefined;
expression: string;
id: number;
is_certified: boolean;
metric_name: string;
verbose_name: string;
warning_text: string;
};
interface Props {
datasource: {
columns: Array<Columns>;
metrics: Array<Metrics>;
};
datasource: DatasourceMeta;
controls: {
datasource: DatasourceControl;
};
......@@ -193,15 +161,8 @@ export default function DataSourcePanel({
const metricSlice = lists.metrics.slice(0, 50);
const columnSlice = lists.columns.slice(0, 50);
return (
<DatasourceContainer>
<Control
{...datasourceControl}
name="datasource"
validationErrors={datasourceControl.validationErrors}
actions={actions}
formData={datasourceControl.mapStateToProps}
/>
const mainBody = (
<>
<input
type="text"
onChange={evt => {
......@@ -245,6 +206,13 @@ export default function DataSourcePanel({
</Collapse.Panel>
</Collapse>
</div>
</>
);
return (
<DatasourceContainer>
<Control {...datasourceControl} name="datasource" actions={actions} />
{datasource.id != null && mainBody}
</DatasourceContainer>
);
}
......@@ -26,6 +26,8 @@ import Icon from 'src/components/Icon';
import ChangeDatasourceModal from 'src/datasource/ChangeDatasourceModal';
import DatasourceModal from 'src/datasource/DatasourceModal';
import { postForm } from 'src/explore/exploreUtils';
import Button from 'src/components/Button';
import ErrorAlert from 'src/components/ErrorMessage/ErrorAlert';
const propTypes = {
actions: PropTypes.object.isRequired,
......@@ -51,6 +53,9 @@ const Styles = styled.div`
border-bottom: 1px solid ${({ theme }) => theme.colors.grayscale.light2};
padding: ${({ theme }) => 2 * theme.gridUnit}px;
}
.error-alert {
margin: ${({ theme }) => 2 * theme.gridUnit}px;
}
.ant-dropdown-trigger {
margin-left: ${({ theme }) => 2 * theme.gridUnit}px;
box-shadow: none;
......@@ -152,6 +157,7 @@ class DatasourceControl extends React.PureComponent {
render() {
const { showChangeDatasourceModal, showEditDatasourceModal } = this.state;
const { datasource, onChange } = this.props;
const isMissingDatasource = datasource.id == null;
const datasourceMenu = (
<Menu onClick={this.handleMenuItemClick}>
{this.props.isEditable && (
......@@ -164,16 +170,22 @@ class DatasourceControl extends React.PureComponent {
</Menu>
);
// eslint-disable-next-line camelcase
const { health_check_message: healthCheckMessage } = datasource;
return (
<Styles className="DatasourceControl">
<div className="data-container">
<Icon name="dataset-physical" className="dataset-svg" />
<Tooltip title={datasource.name}>
<span className="title-select">{datasource.name}</span>
</Tooltip>
{/* Add a tooltip only for long dataset names */}
{!isMissingDatasource && datasource.name.length > 25 ? (
<Tooltip title={datasource.name}>
<span className="title-select">{datasource.name}</span>
</Tooltip>
) : (
<span title={datasource.name} className="title-select">
{datasource.name}
</span>
)}
{healthCheckMessage && (
<Tooltip title={healthCheckMessage}>
<Icon
......@@ -196,6 +208,35 @@ class DatasourceControl extends React.PureComponent {
</Tooltip>
</Dropdown>
</div>
{/* missing dataset */}
{isMissingDatasource && (
<div className="error-alert">
<ErrorAlert
level="warning"
title={t('Missing dataset')}
source="explore"
subtitle={
<>
<p>
{t(
'The dataset linked to this chart may have been deleted.',
)}
</p>
<p>
<Button
buttonStyle="primary"
onClick={() =>
this.handleMenuItemClick({ key: CHANGE_DATASET })
}
>
{t('Change dataset')}
</Button>
</p>
</>
}
/>
</div>
)}
{showEditDatasourceModal && (
<DatasourceModal
datasource={datasource}
......
......@@ -89,4 +89,4 @@ class DatasourceNotFoundValidationError(ValidationError):
status = 404
def __init__(self) -> None:
super().__init__([_("Datasource does not exist")], field_name="datasource_id")
super().__init__([_("Dataset does not exist")], field_name="datasource_id")
......@@ -17,7 +17,6 @@
from typing import List, Optional
from flask_appbuilder.security.sqla.models import User
from sqlalchemy.orm.exc import NoResultFound
from superset.commands.exceptions import (
DatasourceNotFoundValidationError,
......@@ -25,6 +24,7 @@ from superset.commands.exceptions import (
)
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.extensions import db, security_manager
......@@ -53,5 +53,5 @@ def get_datasource_by_id(datasource_id: int, datasource_type: str) -> BaseDataso
return ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
except (NoResultFound, KeyError):
except DatasetNotFoundError:
raise DatasourceNotFoundValidationError()
......@@ -19,6 +19,8 @@ from typing import Dict, List, Optional, Set, Type, TYPE_CHECKING
from sqlalchemy import or_
from sqlalchemy.orm import Session, subqueryload
from superset.datasets.commands.exceptions import DatasetNotFoundError
if TYPE_CHECKING:
from collections import OrderedDict
......@@ -44,12 +46,23 @@ class ConnectorRegistry:
def get_datasource(
cls, datasource_type: str, datasource_id: int, session: Session
) -> "BaseDatasource":
return (
"""Safely get a datasource instance, raises `DatasetNotFoundError` if
`datasource_type` is not registered or `datasource_id` does not
exist."""
if datasource_type not in cls.sources:
raise DatasetNotFoundError()
datasource = (
session.query(cls.sources[datasource_type])
.filter_by(id=datasource_id)
.one()
.one_or_none()
)
if not datasource:
raise DatasetNotFoundError()
return datasource
@classmethod
def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
datasources: List["BaseDatasource"] = []
......
......@@ -39,7 +39,7 @@ from superset.views.base import (
BaseSupersetView,
DatasourceFilter,
DeleteMixin,
get_datasource_exist_error_msg,
get_dataset_exist_error_msg,
ListWidgetWithCheckboxes,
SupersetModelView,
validate_json,
......@@ -352,7 +352,7 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin
models.DruidDatasource.cluster_id == item.cluster_id,
)
if db.session.query(query.exists()).scalar():
raise Exception(get_datasource_exist_error_msg(item.full_name))
raise Exception(get_dataset_exist_error_msg(item.full_name))
def post_add(self, item: "DruidDatasourceModelView") -> None:
item.refresh_metrics()
......
......@@ -231,6 +231,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
try:
new_model = CreateDatasetCommand(g.user, item).run()
return self.response(201, id=new_model.id, result=item)
......
......@@ -26,7 +26,10 @@ from superset.commands.exceptions import (
ImportFailedError,
UpdateFailedError,
)
from superset.views.base import get_datasource_exist_error_msg
def get_dataset_exist_error_msg(full_name: str) -> str:
return _("Dataset %(name)s already exists", name=full_name)
class DatabaseNotFoundValidationError(ValidationError):
......@@ -54,7 +57,7 @@ class DatasetExistsValidationError(ValidationError):
def __init__(self, table_name: str) -> None:
super().__init__(
get_datasource_exist_error_msg(table_name), field_name="table_name"
[get_dataset_exist_error_msg(table_name)], field_name="table_name"
)
......@@ -142,7 +145,8 @@ class OwnersNotFoundValidationError(ValidationError):
class DatasetNotFoundError(CommandException):
message = "Dataset not found."
status = 404
message = _("Dataset does not exist")
class DatasetInvalidError(CommandInvalidError):
......
......@@ -21,7 +21,6 @@ from typing import Any, Callable, Dict, List, Optional
import yaml
from flask_appbuilder import Model
from sqlalchemy.orm import Session
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import make_transient
from superset import db
......@@ -56,14 +55,14 @@ def lookup_sqla_table(table: SqlaTable) -> Optional[SqlaTable]:
def lookup_sqla_database(table: SqlaTable) -> Optional[Database]:
try:
return (
db.session.query(Database)
.filter_by(database_name=table.params_dict["database_name"])
.one()
)
except NoResultFound:
database = (
db.session.query(Database)
.filter_by(database_name=table.params_dict["database_name"])
.one_or_none()
)
if database is None:
raise DatabaseNotFoundError
return database
def lookup_druid_cluster(datasource: DruidDatasource) -> Optional[DruidCluster]:
......
......@@ -206,7 +206,7 @@ class Slice(
"""
Returns a MD5 HEX digest that makes this dashboard unique
"""
return utils.md5_hex(self.params)
return utils.md5_hex(self.params or "")
@property
def thumbnail_url(self) -> str:
......
......@@ -23,7 +23,6 @@ from flask_appbuilder import Model
from sqlalchemy import Column, Enum, ForeignKey, Integer, String
from sqlalchemy.engine.base import Connection
from sqlalchemy.orm import relationship, Session, sessionmaker
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.mapper import Mapper
from superset.models.helpers import AuditMixinNullable
......@@ -89,13 +88,11 @@ class TaggedObject(Model, AuditMixinNullable):
def get_tag(name: str, session: Session, type_: TagTypes) -> Tag:
try:
tag = session.query(Tag).filter_by(name=name, type=type_).one()
except NoResultFound:
tag = session.query(Tag).filter_by(name=name, type=type_).one_or_none()
if tag is None:
tag = Tag(name=name, type=type_)
session.add(tag)
session.commit()
return tag
......
......@@ -225,7 +225,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["Datenquellen"],
"Dataset does not exist": ["Datenquellen"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Druid Datenquelle einfügen"],
......@@ -643,7 +643,7 @@
"Add Annotation Layer": ["Anmerkungstufe"],
"Edit Annotation Layer": ["Anmerkungstufe"],
"Name": ["Name"],
"Datasource %(name)s already exists": [""],
"Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
......
......@@ -776,7 +776,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "Datenquellen"
#: superset/common/query_object.py:301
......@@ -2303,7 +2303,7 @@ msgstr "Name"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -200,7 +200,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": [""],
"Dataset does not exist": [""],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
......@@ -585,7 +585,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": [""],
"Datasource %(name)s already exists": [""],
"Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
......
......@@ -775,7 +775,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr ""
#: superset/common/query_object.py:301
......@@ -2302,7 +2302,7 @@ msgstr ""
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -272,7 +272,7 @@
"Charts could not be deleted.": ["Los Gráficos no han podido eliminarse"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": ["Los propietarios son invalidos"],
"Datasource does not exist": ["La fuente no existe"],
"Dataset does not exist": ["La fuente no existe"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Añadiendo [{}] como nueva fuente"],
......@@ -696,7 +696,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["Nombre"],
"Datasource %(name)s already exists": [
"Dataset %(name)s already exists": [
"La fuente de datos %(name)s ya existe"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
......
......@@ -784,7 +784,7 @@ msgid "Owners are invalid"
msgstr "Los propietarios son invalidos"
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "La fuente no existe"
#: superset/common/query_object.py:301
......@@ -2336,7 +2336,7 @@ msgstr "Nombre"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "La fuente de datos %(name)s ya existe"
#: superset/views/base.py:227
......
......@@ -277,9 +277,7 @@
"Charts could not be deleted.": ["La requête ne peut pas être chargée"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": [
"La source de données %(name)s existe déjà"
],
"Dataset does not exist": ["La source de données %(name)s existe déjà"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Ajouter une source de données Druid"],
......@@ -728,7 +726,7 @@
"Add Annotation Layer": ["Ajouter une couche d'annotation"],
"Edit Annotation Layer": ["Ajouter une couche d'annotation"],
"Name": ["Nom"],
"Datasource %(name)s already exists": [
"Dataset %(name)s already exists": [
"La source de données %(name)s existe déjà"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
......
......@@ -781,7 +781,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "La source de données %(name)s existe déjà"
#: superset/common/query_object.py:301
......@@ -2350,7 +2350,7 @@ msgstr "Nom"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "La source de données %(name)s existe déjà"
#: superset/views/base.py:227
......
......@@ -237,7 +237,7 @@
"Charts could not be deleted.": ["La query non può essere caricata"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["Sorgente dati e tipo di grafico"],
"Dataset does not exist": ["Sorgente dati e tipo di grafico"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
......@@ -643,7 +643,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["Nome"],
"Datasource %(name)s already exists": [""],
"Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
......
......@@ -773,7 +773,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "Sorgente dati e tipo di grafico"
#: superset/common/query_object.py:301
......@@ -2331,7 +2331,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -213,7 +213,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["データソース"],
"Dataset does not exist": ["データソース"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": [""],
......@@ -601,7 +601,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": [""],
"Name": ["名前"],
"Datasource %(name)s already exists": [""],
"Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
......
......@@ -772,7 +772,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "データソース"
#: superset/common/query_object.py:301
......@@ -2294,7 +2294,7 @@ msgstr "名前"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -197,7 +197,7 @@
"Charts could not be deleted.": [""],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["데이터소스"],
"Dataset does not exist": ["데이터소스"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["새 데이터소스 스캔"],
......@@ -579,7 +579,7 @@
"Add Annotation Layer": [""],
"Edit Annotation Layer": ["주석 레이어"],
"Name": ["이름"],
"Datasource %(name)s already exists": [""],
"Dataset %(name)s already exists": [""],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
""
],
......
......@@ -772,7 +772,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "데이터소스"
#: superset/common/query_object.py:301
......@@ -2294,7 +2294,7 @@ msgstr "이름"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -775,7 +775,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr ""
#: superset/common/query_object.py:301
......@@ -2306,7 +2306,7 @@ msgstr ""
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr ""
#: superset/views/base.py:227
......
......@@ -258,7 +258,7 @@
"Charts could not be deleted.": ["Não foi possível carregar a query"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["Origem de dados %(name)s já existe"],
"Dataset does not exist": ["Origem de dados %(name)s já existe"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Adicionar origem de dados Druid"],
......@@ -693,9 +693,7 @@
"Add Annotation Layer": ["Camadas de anotação"],
"Edit Annotation Layer": ["Camadas de anotação"],
"Name": ["Nome"],
"Datasource %(name)s already exists": [
"Origem de dados %(name)s já existe"
],
"Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"Tabela [{}] não encontrada, por favor verifique conexão à base de dados, esquema e nome da tabela"
],
......
......@@ -783,7 +783,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "Origem de dados %(name)s já existe"
#: superset/common/query_object.py:297
......@@ -2363,7 +2363,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "Origem de dados %(name)s já existe"
#: superset/views/base.py:227
......
......@@ -1150,9 +1150,7 @@
"Welcome!": ["Bem vindo!"],
"Test Connection": ["Conexão de teste"],
"Manage": ["Gerir"],
"Datasource %(name)s already exists": [
"Origem de dados %(name)s já existe"
],
"Dataset %(name)s already exists": ["Origem de dados %(name)s já existe"],
"json isn't valid": ["json não é válido"],
"Delete": ["Eliminar"],
"Delete all Really?": ["Tem a certeza que pretende eliminar tudo?"],
......
......@@ -328,7 +328,7 @@
"A importação do gráfico falhou por um motivo desconhecido"
],
"Owners are invalid": ["Donos inválidos"],
"Datasource does not exist": ["Fonte de dados não existe"],
"Dataset does not exist": ["Fonte de dados não existe"],
"`operation` property of post processing object undefined": [
"A propriedade `operation` do objeto de pós processamento está indefinida"
],
......@@ -935,9 +935,7 @@
"Add Annotation Layer": ["Adicionar camada de anotação"],
"Edit Annotation Layer": ["Editar camada de anotação"],
"Name": ["Nome"],
"Datasource %(name)s already exists": [
"Fonte de dados %(name)s já existe"
],
"Dataset %(name)s already exists": ["Fonte de dados %(name)s já existe"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"Não foi possível localizar a tabela [%{table}s], por favor revise sua conexão com o banco de dados, esquema e nome da tabela. Erro: {}"
],
......
......@@ -801,7 +801,7 @@ msgid "Owners are invalid"
msgstr "Donos inválidos"
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "Fonte de dados não existe"
#: superset/common/query_object.py:301
......@@ -2439,7 +2439,7 @@ msgstr "Nome"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "Fonte de dados %(name)s já existe"
#: superset/views/base.py:227
......
......@@ -243,7 +243,7 @@
"Charts could not be deleted.": ["Запрос невозможно загрузить"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["Источник данных %(name)s уже существует"],
"Dataset does not exist": ["Источник данных %(name)s уже существует"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["Добавить Источник Данных Druid"],
......@@ -659,7 +659,7 @@
"Add Annotation Layer": ["Добавить слой аннотации"],
"Edit Annotation Layer": ["Добавить слой аннотации"],
"Name": ["Название"],
"Datasource %(name)s already exists": [
"Dataset %(name)s already exists": [
"Источник данных %(name)s уже существует"
],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
......
......@@ -782,7 +782,7 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgid "Dataset does not exist"
msgstr "Источник данных %(name)s уже существует"
#: superset/common/query_object.py:301
......@@ -2335,7 +2335,7 @@ msgstr "Название"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "Источник данных %(name)s уже существует"
#: superset/views/base.py:227
......
......@@ -223,7 +223,7 @@
"Charts could not be deleted.": ["这个查询无法被加载"],
"Import chart failed for an unknown reason": [""],
"Owners are invalid": [""],
"Datasource does not exist": ["数据源%(name)s 已存在"],
"Dataset does not exist": ["数据集不存在"],
"`operation` property of post processing object undefined": [""],
"Unsupported post processing operation: %(operation)s": [""],
"Adding new datasource [{}]": ["添加 Druid 数据源"],
......@@ -617,7 +617,7 @@
"Add Annotation Layer": ["添加注释层"],
"Edit Annotation Layer": ["添加注释层"],
"Name": ["名字"],
"Datasource %(name)s already exists": ["数据源%(name)s 已存在"],
"Dataset %(name)s already exists": ["数据源%(name)s 已存在"],
"Table [%{table}s] could not be found, please double check your database connection, schema, and table name, error: {}": [
"找不到 [{}] 表,请仔细检查您的数据库连接、Schema 和 表名"
],
......
......@@ -773,8 +773,8 @@ msgid "Owners are invalid"
msgstr ""
#: superset/commands/exceptions.py:92
msgid "Datasource does not exist"
msgstr "数据源%(name)s 已存在"
msgid "Dataset does not exist"
msgstr "数据集不存在"
#: superset/common/query_object.py:301
msgid "`operation` property of post processing object undefined"
......@@ -2315,7 +2315,7 @@ msgstr "名字"
#: superset/views/base.py:207
#, python-format
msgid "Datasource %(name)s already exists"
msgid "Dataset %(name)s already exists"
msgstr "数据源%(name)s 已存在"
#: superset/views/base.py:227
......
......@@ -71,6 +71,7 @@ from flask import current_app, flash, g, Markup, render_template
from flask_appbuilder import SQLA
from flask_appbuilder.security.sqla.models import Role, User
from flask_babel import gettext as __
from flask_babel.speaklater import LazyString
from sqlalchemy import event, exc, select, Text
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.engine import Connection, Engine
......@@ -504,6 +505,8 @@ def base_json_conv( # pylint: disable=inconsistent-return-statements,too-many-r
return obj.decode("utf-8")
except Exception: # pylint: disable=broad-except
return "[bytes]"
if isinstance(obj, LazyString):
return str(obj)
def json_iso_dttm_ser(obj: Any, pessimistic: bool = False) -> str:
......
......@@ -47,6 +47,7 @@ from superset import (
security_manager,
)
from superset.connectors.sqla import models
from superset.datasets.commands.exceptions import get_dataset_exist_error_msg
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
SupersetErrorException,
......@@ -203,10 +204,6 @@ def handle_api_exception(
return functools.update_wrapper(wraps, f)
def get_datasource_exist_error_msg(full_name: str) -> str:
return __("Datasource %(name)s already exists", name=full_name)
def validate_sqlatable(table: models.SqlaTable) -> None:
"""Checks the table existence in the database."""
with db.session.no_autoflush:
......@@ -216,7 +213,7 @@ def validate_sqlatable(table: models.SqlaTable) -> None:
models.SqlaTable.database_id == table.database.id,
)
if db.session.query(table_query.exists()).scalar():
raise Exception(get_datasource_exist_error_msg(table.full_name))
raise Exception(get_dataset_exist_error_msg(table.full_name))
# Fail before adding if the table can't be found
try:
......
......@@ -59,6 +59,7 @@ from superset import (
viz,
)
from superset.charts.dao import ChartDAO
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import (
AnnotationDatasource,
......@@ -70,6 +71,7 @@ from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.dao import DashboardDAO
from superset.databases.dao import DatabaseDAO
from superset.databases.filters import DatabaseFilter
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.exceptions import (
CacheLoadError,
CertificateException,
......@@ -293,7 +295,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
dar.datasource_type, dar.datasource_id, session,
)
if not datasource or security_manager.can_access_datasource(datasource):
# datasource does not exist anymore
# Dataset does not exist anymore
session.delete(dar)
session.commit()
......@@ -695,50 +697,47 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
)
}
)
flash(Markup(config["SIP_15_TOAST_MESSAGE"].format(url=url)))
error_redirect = "/chart/list/"
try:
datasource_id, datasource_type = get_datasource_info(
datasource_id, datasource_type, form_data
)
except SupersetException as ex:
flash(
_(
"Error occurred when opening the chart: %(error)s",
error=utils.error_msg_from_exception(ex),
),
"danger",
)
return redirect(error_redirect)
except SupersetException:
datasource_id = None
# fall back to table type for an unknown datasource
datasource_type = SqlaTable.type
datasource = ConnectorRegistry.get_datasource(
cast(str, datasource_type), datasource_id, db.session
)
if not datasource:
flash(DATASOURCE_MISSING_ERR, "danger")
return redirect(error_redirect)
datasource: Optional[BaseDatasource] = None
if datasource_id is not None:
try:
datasource = ConnectorRegistry.get_datasource(
cast(str, datasource_type), datasource_id, db.session
)
except DatasetNotFoundError:
pass
datasource_name = datasource.name if datasource else _("[Missing Dataset]")
if config["ENABLE_ACCESS_REQUEST"] and (
not security_manager.can_access_datasource(datasource)
):
flash(
__(security_manager.get_datasource_access_error_msg(datasource)),
"danger",
)
return redirect(
"superset/request_access/?"
f"datasource_type={datasource_type}&"
f"datasource_id={datasource_id}&"
)
if datasource:
if config["ENABLE_ACCESS_REQUEST"] and (
not security_manager.can_access_datasource(datasource)
):
flash(
__(security_manager.get_datasource_access_error_msg(datasource)),
"danger",
)
return redirect(
"superset/request_access/?"
f"datasource_type={datasource_type}&"
f"datasource_id={datasource_id}&"
)
# if feature enabled, run some health check rules for sqla datasource
if hasattr(datasource, "health_check"):
datasource.health_check()
# if feature enabled, run some health check rules for sqla datasource
if hasattr(datasource, "health_check"):
datasource.health_check()
viz_type = form_data.get("viz_type")
if not viz_type and datasource.default_endpoint:
if not viz_type and datasource and datasource.default_endpoint:
return redirect(datasource.default_endpoint)
# slc perms
......@@ -771,25 +770,31 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
status=400,
)
if action in ("saveas", "overwrite"):
if action in ("saveas", "overwrite") and datasource:
return self.save_or_overwrite_slice(
slc,
slice_add_perm,
slice_overwrite_perm,
slice_download_perm,
datasource_id,
cast(str, datasource_type),
datasource.id,
datasource.type,
datasource.name,
)
standalone = (
request.args.get(utils.ReservedUrlParameters.STANDALONE.value) == "true"
)
dummy_datasource_data: Dict[str, Any] = {
"type": datasource_type,
"name": datasource_name,
"columns": [],
"metrics": [],
}
bootstrap_data = {
"can_add": slice_add_perm,
"can_download": slice_download_perm,
"can_overwrite": slice_overwrite_perm,
"datasource": datasource.data,
"datasource": datasource.data if datasource else dummy_datasource_data,
"form_data": form_data,
"datasource_id": datasource_id,
"datasource_type": datasource_type,
......@@ -799,15 +804,18 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
"forced_height": request.args.get("height"),
"common": common_bootstrap_payload(),
}
table_name = (
datasource.table_name
if datasource_type == "table"
else datasource.datasource_name
)
if slc:
title = slc.slice_name
else:
elif datasource:
table_name = (
datasource.table_name
if datasource_type == "table"
else datasource.datasource_name
)
title = _("Explore - %(table)s", table=table_name)
else:
title = _("Explore")
return self.render_template(
"superset/basic.html",
bootstrap_data=json.dumps(
......@@ -1626,6 +1634,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
table_name = request.args.get("table_name")
db_name = request.args.get("db_name")
extra_filters = request.args.get("extra_filters")
slices: List[Slice] = []
if not slice_id and not (table_name and db_name):
return json_error_response(
......
......@@ -20,7 +20,7 @@ from collections import Counter
from flask import request
from flask_appbuilder import expose
from flask_appbuilder.security.decorators import has_access_api
from sqlalchemy.orm.exc import NoResultFound
from flask_babel import _
from superset import db
from superset.connectors.connector_registry import ConnectorRegistry
......@@ -42,7 +42,7 @@ class Datasource(BaseSupersetView):
def save(self) -> FlaskResponse:
data = request.form.get("data")
if not isinstance(data, str):
return json_error_response("Request missing data field.", status=500)
return json_error_response(_("Request missing data field."), status=500)
datasource_dict = json.loads(data)
datasource_id = datasource_dict.get("id")
......@@ -58,9 +58,7 @@ class Datasource(BaseSupersetView):
try:
check_ownership(orm_datasource)
except SupersetSecurityException:
return json_error_response(
f"{DatasetForbiddenError.message}", DatasetForbiddenError.status
)
raise DatasetForbiddenError()
datasource_dict["owners"] = (
db.session.query(orm_datasource.owner_class)
......@@ -77,7 +75,11 @@ class Datasource(BaseSupersetView):
]
if duplicates:
return json_error_response(
f"Duplicate column name(s): {','.join(duplicates)}", status=409
_(
"Duplicate column name(s): %(columns)s",
columns=",".join(duplicates),
),
status=409,
)
orm_datasource.update_from_object(datasource_dict)
if hasattr(orm_datasource, "health_check"):
......@@ -92,17 +94,10 @@ class Datasource(BaseSupersetView):
@api
@handle_api_exception
def get(self, datasource_type: str, datasource_id: int) -> FlaskResponse:
try:
orm_datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
if not orm_datasource.data:
return json_error_response(
"Error fetching datasource data.", status=500
)
return self.json_response(orm_datasource.data)
except NoResultFound:
return json_error_response("This datasource does not exist", status=400)
datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
return self.json_response(datasource.data)
@expose("/external_metadata/<datasource_type>/<datasource_id>/")
@has_access_api
......@@ -112,11 +107,11 @@ class Datasource(BaseSupersetView):
self, datasource_type: str, datasource_id: int
) -> FlaskResponse:
"""Gets column info from the source system"""
datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
try:
datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
external_metadata = datasource.external_metadata()
return self.json_response(external_metadata)
except SupersetException as ex:
return json_error_response(str(ex), status=400)
return self.json_response(external_metadata)
......@@ -26,7 +26,7 @@ import simplejson as json
from flask import g, request
from flask_appbuilder.security.sqla import models as ab_models
from flask_appbuilder.security.sqla.models import User
from flask_babel import gettext as __
from flask_babel import _
from sqlalchemy.orm.exc import NoResultFound
import superset.models.core as models
......@@ -227,7 +227,7 @@ def get_datasource_info(
if not datasource_id:
raise SupersetException(
"The dataset associated with this chart no longer exists"
_("The dataset associated with this chart no longer exists")
)
datasource_id = int(datasource_id)
......@@ -489,7 +489,7 @@ def check_datasource_perms(
SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR,
message=__("Could not determine datasource type"),
message=_("Could not determine datasource type"),
)
)
......@@ -505,7 +505,7 @@ def check_datasource_perms(
SupersetError(
error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR,
level=ErrorLevel.ERROR,
message=__("Could not find viz object"),
message=_("Could not find viz object"),
)
)
......
......@@ -18,6 +18,7 @@
"""Unit tests for Superset"""
import imp
import json
from contextlib import contextmanager
from typing import Any, Dict, Union, List, Optional
from unittest.mock import Mock, patch
......@@ -26,6 +27,7 @@ import pytest
from flask import Response
from flask_appbuilder.security.sqla import models as ab_models
from flask_testing import TestCase
from sqlalchemy.ext.declarative.api import DeclarativeMeta
from sqlalchemy.orm import Session
from tests.test_app import app
......@@ -495,3 +497,16 @@ class SupersetTestCase(TestCase):
else:
mock_method.assert_called_once_with("error", func_name)
return rv
@contextmanager
def db_insert_temp_object(obj: DeclarativeMeta):
    """Persist ``obj`` only for the duration of the ``with`` block.

    The object is added and committed before being yielded; on exit
    (normal or via exception) it is deleted and the deletion committed,
    so the database is left in its original state.
    """
    current_session = db.session
    try:
        current_session.add(obj)
        current_session.commit()
        yield obj
    finally:
        # Clean up even if the body (or the insert commit) raised.
        current_session.delete(obj)
        current_session.commit()
......@@ -527,8 +527,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
"datasource_id": 1,
"datasource_type": "unknown",
}
uri = f"api/v1/chart/"
rv = self.post_assert_metric(uri, chart_data, "post")
rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
self.assertEqual(rv.status_code, 400)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
......@@ -540,12 +539,11 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
"datasource_id": 0,
"datasource_type": "table",
}
uri = f"api/v1/chart/"
rv = self.post_assert_metric(uri, chart_data, "post")
rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
self.assertEqual(rv.status_code, 422)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
response, {"message": {"datasource_id": ["Dataset does not exist"]}}
)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
......@@ -665,25 +663,26 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
Chart API: Test update validate datasource
"""
admin = self.get_user("admin")
chart = self.insert_chart("title", [admin.id], 1)
chart = self.insert_chart("title", owners=[admin.id], datasource_id=1)
self.login(username="admin")
chart_data = {"datasource_id": 1, "datasource_type": "unknown"}
uri = f"api/v1/chart/{chart.id}"
rv = self.put_assert_metric(uri, chart_data, "put")
rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
self.assertEqual(rv.status_code, 400)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
response,
{"message": {"datasource_type": ["Must be one of: druid, table, view."]}},
)
chart_data = {"datasource_id": 0, "datasource_type": "table"}
uri = f"api/v1/chart/{chart.id}"
rv = self.put_assert_metric(uri, chart_data, "put")
rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
self.assertEqual(rv.status_code, 422)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
response, {"message": {"datasource_id": ["Dataset does not exist"]}}
)
db.session.delete(chart)
db.session.commit()
......
......@@ -475,12 +475,11 @@ class TestDatasetApi(SupersetTestCase):
"database": energy_usage_ds.database_id,
"table_name": energy_usage_ds.table_name,
}
uri = "api/v1/dataset/"
rv = self.post_assert_metric(uri, table_data, "post")
rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
assert rv.status_code == 422
data = json.loads(rv.data.decode("utf-8"))
assert data == {
"message": {"table_name": ["Datasource energy_usage already exists"]}
"message": {"table_name": ["Dataset energy_usage already exists"]}
}
def test_create_dataset_same_name_different_schema(self):
......@@ -838,7 +837,7 @@ class TestDatasetApi(SupersetTestCase):
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 422
expected_response = {
"message": {"table_name": ["Datasource ab_user already exists"]}
"message": {"table_name": ["Dataset ab_user already exists"]}
}
assert data == expected_response
db.session.delete(dataset)
......
......@@ -22,10 +22,11 @@ import pytest
from superset import app, ConnectorRegistry, db
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.utils.core import get_example_database
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices
from .base_tests import SupersetTestCase
from .base_tests import db_insert_temp_object, SupersetTestCase
from .fixtures.datasource import datasource_post
......@@ -72,42 +73,28 @@ class TestDatasource(SupersetTestCase):
def test_external_metadata_for_malicious_virtual_table(self):
self.login(username="admin")
session = db.session
table = SqlaTable(
table_name="malicious_sql_table",
database=get_example_database(),
sql="delete table birth_names",
)
session.add(table)
session.commit()
table = self.get_table_by_name("malicious_sql_table")
url = f"/datasource/external_metadata/table/{table.id}/"
resp = self.get_json_resp(url)
assert "error" in resp
session.delete(table)
session.commit()
with db_insert_temp_object(table):
url = f"/datasource/external_metadata/table/{table.id}/"
resp = self.get_json_resp(url)
self.assertEqual(resp["error"], "Only `SELECT` statements are allowed")
def test_external_metadata_for_mutistatement_virtual_table(self):
self.login(username="admin")
session = db.session
table = SqlaTable(
table_name="multistatement_sql_table",
database=get_example_database(),
sql="select 123 as intcol, 'abc' as strcol;"
"select 123 as intcol, 'abc' as strcol",
)
session.add(table)
session.commit()
table = self.get_table_by_name("multistatement_sql_table")
url = f"/datasource/external_metadata/table/{table.id}/"
resp = self.get_json_resp(url)
assert "error" in resp
session.delete(table)
session.commit()
with db_insert_temp_object(table):
url = f"/datasource/external_metadata/table/{table.id}/"
resp = self.get_json_resp(url)
self.assertEqual(resp["error"], "Only single queries supported")
def compare_lists(self, l1, l2, key):
l2_lookup = {o.get(key): o for o in l2}
......@@ -251,7 +238,16 @@ class TestDatasource(SupersetTestCase):
del app.config["DATASET_HEALTH_CHECK"]
def test_get_datasource_failed(self):
pytest.raises(
DatasetNotFoundError,
lambda: ConnectorRegistry.get_datasource("table", 9999999, db.session),
)
self.login(username="admin")
url = f"/datasource/get/druid/500000/"
resp = self.get_json_resp(url)
self.assertEqual(resp.get("error"), "This datasource does not exist")
resp = self.get_json_resp("/datasource/get/druid/500000/", raise_on_error=False)
self.assertEqual(resp.get("error"), "Dataset does not exist")
resp = self.get_json_resp(
"/datasource/get/invalid-datasource-type/500000/", raise_on_error=False
)
self.assertEqual(resp.get("error"), "Dataset does not exist")
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册