Unverified commit d66066b5, authored by chenjian, committed by GitHub

[Frontend] Add model conversion frontend (#1232)

* add paddle2onnx component

* add comments

* fix

* onnx2paddle

* filetype

* filetype2

* filetype3

* filetype4

* filetype5

* filetype5

* filetype6

* filetype7

* filetype7

* filetype8

* filetype8

* filetype8

* filetype8

* filetype9

* filetype10

* filetype11

* filetype12

* filetype13

* filetype14

* filetype15

* filetype16

* filetype17

* filetype18

* add animation for converting

* add animation for downloading

* remove unnecessary file

* optimize logic

* add text

* update

---------
Co-authored-by: Nxiaoyixin-cmd <1634228212@qq.com>
Parent 092c0274
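Overview of the data flow this commit adds (a minimal sketch, not part of the diff itself): the new uploader components collect a model through an antd form, POST it as multipart form data to the VisualDL inference service at /inference/paddle2onnx/convert or /inference/onnx2paddle/convert, then decode the base64-encoded model string returned by the server into a File that the Netron-based graph page can render. The hypothetical helper below condenses that round trip, assuming the axios_fetcher helper this commit adds to utils/fetch.ts; the name convertPaddleToOnnx is illustrative only.

import {axios_fetcher} from '~/utils/fetch';

// Hypothetical helper (illustrative only) summarizing the request/response round trip used by the uploaders.
async function convertPaddleToOnnx(modelFile: File, paramFile: File): Promise<File> {
    const formData = new FormData();
    formData.append('model', modelFile); // the .pdmodel file
    formData.append('param', paramFile); // the .pdiparams file
    formData.append('opset_version', '11');
    formData.append('deploy_backend', 'onnxruntime');
    // The server answers with JSON {request_id, model}, where `model` is the base64-encoded ONNX file.
    const res: any = await axios_fetcher('/inference/paddle2onnx/convert', {method: 'POST', body: formData});
    const bytes = atob(res.model);
    const buffer = new Uint8Array(bytes.length);
    for (let i = 0; i < bytes.length; i++) {
        buffer[i] = bytes.charCodeAt(i);
    }
    // Wrap the decoded bytes in a File so it can be handed to the Netron-based graph viewer.
    return new File([buffer], 'model.onnx');
}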
@@ -11,7 +11,7 @@
"error": "Error occurred",
"graph": "Graphs",
"dynamic_graph": "dynamic",
- "ToggleGraph": "X2Paddle",
+ "x2paddle": "Model Conversion",
"static_graph": "static",
"high-dimensional": "High Dimensional",
"profiler": "performance analysis",
@@ -8,5 +8,26 @@
"warin-info4": "The model has been converted, please do not click again",
"warin-info5": "Please upload the model file and convert it",
"warin-info6": "Model file has been converted, please do not click again",
- "warin-info7": "Please upload the model file"
+ "warin-info7": "Please upload the model file",
+ "Conversion": "Conversion",
+ "pdmodels": "pdmodels",
+ "pdiparams": "pdiparams",
+ "model": "model",
+ "opset_version": "opset_version",
+ "deploy_backend": "deploy_backend",
+ "lite_valid_places": "lite_valid_places",
+ "lite_model_type": "lite_model_type",
+ "convert_to_lite": "convert_to_lite",
+ "onnx_model": "onnx model",
+ "Download": "Download",
+ "Reload": "Reload",
+ "View": "View",
+ "Please": "Please select the file",
+ "isRequire": "This item is required",
+ "isYes": "Yes",
+ "isNo": "No",
+ "Paddle2OnnxTitle": "Paddle2Onnx model conversion configuration",
+ "Onnx2PaddleTitle": "Onnx2Paddle model conversion configuration",
+ "converting": "Converting now, please wait",
+ "downloading": "Downloading now, please wait"
}
@@ -11,7 +11,7 @@
"error": "发生错误",
"graph": "网络结构",
"dynamic_graph": "动态",
- "ToggleGraph": "X2Paddle",
+ "x2paddle": "模型转换",
"static_graph": "静态",
"high-dimensional": "数据降维",
"profiler": "性能分析",
@@ -8,5 +8,26 @@
"warin-info4": "模型已转换,请勿再次点击",
"warin-info5": "请上传模型文件并转换",
"warin-info6": "模型文件已转换,请勿再次点击",
- "warin-info7": "请上传模型文件"
+ "warin-info7": "请上传模型文件",
+ "Conversion": "转换",
+ "pdmodels": "模型结构文件",
+ "pdiparams": "模型参数文件",
+ "model": "模型",
+ "opset_version": "op集合版本",
+ "deploy_backend": "部署后端类型",
+ "lite_valid_places": "Lite后端类型",
+ "lite_model_type": "Lite模型类型",
+ "convert_to_lite": "是否转成Paddle-Lite支持格式",
+ "onnx_model": "onnx模型",
+ "Download": "下载",
+ "Reload": "重新载入",
+ "View": "视图",
+ "Please": "请上传模型文件",
+ "isRequire": "该项为必填项",
+ "isYes": "是",
+ "isNo": "否",
+ "Paddle2OnnxTitle": "Paddle2Onnx模型转换配置",
+ "Onnx2PaddleTitle": "Onnx2Paddle模型转换配置",
+ "converting": "转换中,请稍等片刻",
+ "downloading": "文件下载中,请稍等片刻"
}
/**
* Copyright 2020 Baidu Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React, {FunctionComponent} from 'react';
import {WithStyled, contentHeight, contentMargin, headerHeight, position, transitionProps} from '~/utils/style';
import BodyLoading from '~/components/BodyLoading';
import styled from 'styled-components';
const Section = styled.section`
display: flex;
font-family: PingFangSC-Regular;
aside {
position: static;
height: auto;
}
`;
const Article = styled.article`
flex: auto;
min-width: 0;
margin: ${contentMargin};
min-height: ${contentHeight};
`;
const Aside = styled.aside`
flex: none;
background-color: var(--background-color);
height: ${`calc(100vh - ${headerHeight})`};
${position('sticky', headerHeight, 0, null, null)}
width:18.571428571428573rem;
overflow-x: hidden;
overflow-y: auto;
${transitionProps('background-color')}
`;
const ProfilerAside = styled.aside`
flex: none;
background-color: var(--background-color);
height: auto;
position: static;
overflow-x: hidden;
overflow-y: auto;
${transitionProps('background-color')}
`;
type ContentProps = {
aside?: React.ReactNode;
leftAside?: React.ReactNode;
loading?: boolean;
isProfiler?: boolean;
show?: boolean;
nodeShow?: boolean;
};
const Content: FunctionComponent<ContentProps & WithStyled> = ({
children,
aside,
leftAside,
loading,
className,
isProfiler,
show,
nodeShow
}) => (
<Section className={className}>
{leftAside && <Aside>{leftAside}</Aside>}
<Article>{children}</Article>
{aside && isProfiler ? (
<ProfilerAside>{aside}</ProfilerAside>
) : (
// `${`calc(100vh - ${headerHeight})`}`
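// When a node sidebar is open (nodeShow) the aside height follows its content; otherwise it is capped at the viewport height minus a fixed offset, and collapses to 0 when there is nothing to show.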
<Aside
style={{
display: aside ? 'inline-block' : 'none',
height: aside
? nodeShow
? 'auto'
: `${`calc(100vh - 13.28571rem)`}`
: show
? nodeShow
? 'auto'
: `${`calc(100vh - 13.28571rem)`}`
: '0px',
position: show ? 'relative' : 'absolute',
top: '0px'
// height: '0px',
// at this point the view is split and nothing is selected
// width: '260px',
}}
/* display: inline-block; */
// height: calc(100vh - 13.2857rem);
// position: relative;
// top: 0px;
// height: 0px;
// width: 260px;
>
{aside}
</Aside>
)}
{loading && <BodyLoading />}
</Section>
);
export default Content;
@@ -71,6 +71,7 @@ const Content = styled.div`
> iframe {
${size('100%', '100%')}
+ // ${size('50%', '100%')}
border: none;
}
@@ -23,6 +23,8 @@ import ChartToolbox from '~/components/ChartToolbox';
import HashLoader from 'react-spinners/HashLoader';
import logo from '~/assets/images/netron.png';
import netron2 from '@visualdl/netron2';
+ import netron from '@visualdl/netron';
import styled from 'styled-components';
import {toast} from 'react-toastify';
import useTheme from '~/hooks/useTheme';
@@ -72,6 +74,7 @@ const Content = styled.div`
height: calc(100% - ${toolboxHeight});
> iframe {
+ // ${size('50%', '100%')}
${size('100%', '100%')}
border: none;
}
@@ -300,9 +303,17 @@ const Graph = React.forwardRef<GraphRef, GraphProps>(
tooltipPlacement="bottom"
/>
<Content>
+ {/* <iframe
+ // ref={iframe}
+ src={PUBLIC_PATH + netron2}
+ frameBorder={0}
+ scrolling="no"
+ marginWidth={0}
+ marginHeight={0}
+ ></iframe> */}
<iframe
ref={iframe}
- src={PUBLIC_PATH + netron2}
+ src={PUBLIC_PATH + netron}
frameBorder={0}
scrolling="no"
marginWidth={0}
/**
* Copyright 2020 Baidu Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type {Documentation, OpenedResult, Properties, SearchItem, SearchResult} from '~/resource/graph/types';
import React, {useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState} from 'react';
import {contentHeight, position, primaryColor, rem, size, transitionProps} from '~/utils/style';
import ChartToolbox from '~/components/ChartToolbox';
import HashLoader from 'react-spinners/HashLoader';
import logo from '~/assets/images/netron.png';
import netron from '@visualdl/netron';
import netron2 from '@visualdl/netron2';
import styled from 'styled-components';
import {toast} from 'react-toastify';
import useTheme from '~/hooks/useTheme';
import {useTranslation} from 'react-i18next';
const PUBLIC_PATH: string = import.meta.env.SNOWPACK_PUBLIC_PATH;
let IFRAME_HOST = `${window.location.protocol}//${window.location.host}`;
if (PUBLIC_PATH.startsWith('http')) {
const url = new URL(PUBLIC_PATH);
IFRAME_HOST = `${url.protocol}//${url.host}`;
}
const toolboxHeight = rem(40);
const Wrapper = styled.div`
position: relative;
height: ${contentHeight};
background-color: var(--background-color);
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
${transitionProps('background-color')}
`;
const RenderContent = styled.div<{show: boolean}>`
position: absolute;
top: 0;
left: 0;
${size('100%', '100%')}
opacity: ${props => (props.show ? 1 : 0)};
z-index: ${props => (props.show ? 0 : -1)};
pointer-events: ${props => (props.show ? 'auto' : 'none')};
`;
const Toolbox = styled(ChartToolbox)`
height: ${toolboxHeight};
border-bottom: 1px solid var(--border-color);
padding: 0 ${rem(20)};
${transitionProps('border-color')}
`;
const Content = styled.div`
position: relative;
height: calc(100% - ${toolboxHeight});
> iframe {
${size('100%', '100%')}
// ${size('50%', '100%')}
border: none;
}
> .powered-by {
display: block;
${position('absolute', null, null, rem(20), rem(30))}
color: var(--graph-copyright-color);
font-size: ${rem(14)};
user-select: none;
img {
height: 1em;
filter: var(--graph-copyright-logo-filter);
vertical-align: middle;
}
}
`;
const Loading = styled.div`
${size('100%', '100%')}
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
overscroll-behavior: none;
cursor: progress;
font-size: ${rem(16)};
line-height: ${rem(60)};
`;
export type GraphRef = {
export(type: 'svg' | 'png'): void;
changeGraph(name: string): void;
search(value: string): void;
select(item: SearchItem): void;
showModelProperties(): void;
showNodeDocumentation(data: Properties): void;
};
type GraphProps = {
files: FileList | File[] | null;
uploader: JSX.Element;
showAttributes: boolean;
showInitializers: boolean;
showNames: boolean;
horizontal: boolean;
onRendered?: (flag: boolean) => unknown;
onOpened?: (data: OpenedResult) => unknown;
onSearch?: (data: SearchResult) => unknown;
onShowModelProperties?: (data: Properties) => unknown;
onShowNodeProperties?: (data: Properties) => unknown;
onShowNodeDocumentation?: (data: Documentation) => unknown;
};
const Graph = React.forwardRef<GraphRef, GraphProps>(
(
{
files,
uploader,
showAttributes,
showInitializers,
showNames,
horizontal,
onRendered,
onOpened,
onSearch,
onShowModelProperties,
onShowNodeProperties,
onShowNodeDocumentation
},
ref
) => {
const {t} = useTranslation('graph');
const theme = useTheme();
const [ready, setReady] = useState(false);
const [loading, setLoading] = useState(false);
const [rendered, setRendered] = useState(false);
const iframe = useRef<HTMLIFrameElement>(null);
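// Messages posted by the embedded Netron iframe land here: 'status' events drive the ready/loading/rendered flags, and search results, node selections and documentation requests are passed up through the on* callbacks.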
const handler = useCallback(
(event: MessageEvent) => {
if (event.data) {
const {type, data} = event.data;
switch (type) {
case 'status':
switch (data) {
case 'ready':
return setReady(true);
case 'loading':
return setLoading(true);
case 'rendered':
setLoading(false);
setRendered(true);
// debugger;
onRendered?.(true);
return;
}
return;
case 'opened':
return onOpened?.(data);
case 'search':
return onSearch?.(data);
case 'cancel':
return setLoading(false);
case 'error':
toast.error(data);
setLoading(false);
return;
case 'show-model-properties':
return onShowModelProperties?.(data);
case 'show-node-properties':
return onShowNodeProperties?.(data);
case 'show-node-documentation':
return onShowNodeDocumentation?.(data);
}
}
},
[onRendered, onOpened, onSearch, onShowModelProperties, onShowNodeProperties, onShowNodeDocumentation]
);
const dispatch = useCallback((type: string, data?: unknown) => {
iframe.current?.contentWindow?.postMessage(
{
type,
data
},
IFRAME_HOST
);
}, []);
useEffect(() => {
window.addEventListener('message', handler);
dispatch('ready');
return () => {
window.removeEventListener('message', handler);
};
}, [handler, dispatch]);
useEffect(() => {
console.log('GraphStaticss', files, ready);
(ready && dispatch('change-files', files)) || undefined;
}, [dispatch, files, ready]);
useEffect(
() => (ready && dispatch('toggle-attributes', showAttributes)) || undefined,
[dispatch, showAttributes, ready]
);
useEffect(
() => (ready && dispatch('toggle-initializers', showInitializers)) || undefined,
[dispatch, showInitializers, ready]
);
useEffect(() => (ready && dispatch('toggle-names', showNames)) || undefined, [dispatch, showNames, ready]);
useEffect(
() => (ready && dispatch('toggle-direction', horizontal)) || undefined,
[dispatch, horizontal, ready]
);
useEffect(() => (ready && dispatch('toggle-theme', theme)) || undefined, [dispatch, theme, ready]);
useImperativeHandle(ref, () => ({
export(type) {
dispatch('export', type);
},
changeGraph(name) {
dispatch('change-graph', name);
},
search(value) {
dispatch('search', value);
},
select(item) {
dispatch('select', item);
},
showModelProperties() {
dispatch('show-model-properties');
},
showNodeDocumentation(data) {
dispatch('show-node-documentation', data);
}
}));
const content = useMemo(() => {
if (!ready || loading) {
return (
<Loading>
<HashLoader size="60px" color={primaryColor} />
</Loading>
);
}
if (!files) {
// debugger;
return uploader;
}
if (ready && !rendered) {
// debugger;
return uploader;
}
return null;
}, [ready, loading, rendered, uploader, files]);
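// Only reveal the rendered graph once Netron reports it has finished rendering the current files; until then keep the loader or the uploader visible.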
const shows = !loading && rendered && files;
return (
<Wrapper>
{content}
<RenderContent show={shows ? true : false}>
<Toolbox
items={[
{
icon: 'zoom-in',
tooltip: t('graph:zoom-in'),
onClick: () => dispatch('zoom-in')
},
{
icon: 'zoom-out',
tooltip: t('graph:zoom-out'),
onClick: () => dispatch('zoom-out')
},
{
icon: 'restore-size',
tooltip: t('graph:restore-size'),
onClick: () => dispatch('zoom-reset')
}
]}
reversed
tooltipPlacement="bottom"
/>
<Content>
{/* <iframe
ref={iframe}
src={PUBLIC_PATH + netron}
frameBorder={0}
scrolling="no"
marginWidth={0}
marginHeight={0}
></iframe> */}
<iframe
ref={iframe}
src={PUBLIC_PATH + netron}
frameBorder={0}
scrolling="no"
marginWidth={0}
marginHeight={0}
></iframe>
<a
className="powered-by"
href="https://github.com/lutzroeder/netron"
target="_blank"
rel="noreferrer"
>
Powered by <img src={PUBLIC_PATH + logo} alt="netron" />
</a>
</Content>
</RenderContent>
</Wrapper>
);
}
);
Graph.displayName = 'Graph';
export default Graph;
/* eslint-disable react-hooks/rules-of-hooks */
import React, {useState} from 'react';
import {Form, Input, Radio, Select} from 'antd';
import type {UploadProps} from 'antd';
import Buttons from '~/components/Button';
import {axios_fetcher} from '~/utils/fetch';
import {message} from 'antd';
import {useTranslation} from 'react-i18next';
import {Progress} from 'antd';
const {Option} = Select;
export default function xpaddleUploader(props: any) {
const [form] = Form.useForm();
const {t} = useTranslation(['togglegraph', 'common']);
const formLayout: any = {labelCol: {span: 4}, wrapperCol: {span: 14}};
const [convertProcess, setConvertProgress] = useState(0);
const [convertProcessFlag, setconvertProcessFlag] = useState(false);
const Uploadprops: UploadProps = {
name: 'file',
action: '',
headers: {
authorization: 'authorization-text'
},
onChange(info) {
// debugger;
if (info.file.status !== 'uploading') {
console.log(info.file, info.fileList);
}
if (info.file.status === 'done') {
message.success(`${info.file.name} file uploaded successfully`);
} else if (info.file.status === 'error') {
message.error(`${info.file.name} file upload failed.`);
}
}
};
const LiteBackend = [
'arm',
'opencl',
'x86',
'metal',
'xpu',
'bm',
'mlu',
'intel_fpga',
'huawei_ascend_npu',
'imagination_nna',
'rockchip_npu',
'mediatek_apu',
'huawei_kirin_npu',
'amlogic_npu'
];
const lite_model_type = ['protobuf', 'naive_buffer'];
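// Decode the base64-encoded model returned by the conversion API into a File object so the Netron-based graph page can render it.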
const base64UrlToFile = (base64Url: any, filename: any) => {
// const arr = base64Url.split(',');
// const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(base64Url);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n--) {
u8arr[n] = bstr.charCodeAt(n);
}
return new File([u8arr], filename);
};
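// Validate the form, POST the ONNX model to /inference/onnx2paddle/convert, hand the returned pdmodel to the graph viewer, and trigger the download of the converted archive.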
const submodel = async () => {
props.changeLoading(true);
const values = await form.validateFields();
const formData = new FormData();
const onnx_file_component = document.getElementById('upload_onnx_model_file') as HTMLInputElement;
const onnx_file = onnx_file_component!.files![0];
formData.append('convert_to_lite', values.convertToLite);
formData.append('model', onnx_file);
formData.append('lite_valid_places', values.liteValidPlaces);
formData.append('lite_model_type', values.liteModelType);
axios_fetcher(
`/inference/onnx2paddle/convert`,
{
method: 'POST',
body: formData
},
{
onDownloadProgress: function (axiosProgressEvent: any) {
setConvertProgress(Math.round(axiosProgressEvent.progress! * 100));
setconvertProcessFlag(true);
}
}
)
.then(
(res: any) => {
const files2 = base64UrlToFile(res.model, 'model.pdmodel');
props.setFiles([onnx_file]);
props.changeFiles2([files2]);
const current_date = new Date();
const filename = `${current_date.getFullYear()}_${current_date.getMonth()}_${current_date.getDay()}_${current_date.getHours()}_${current_date.getMinutes()}_${current_date.getSeconds()}_paddlemodel.tar`;
props.downloadEvent(res['request_id'], filename);
},
res => {
props.changeLoading(false);
console.log(res);
}
)
.finally(() => {
setconvertProcessFlag(false);
});
};
return (
<div>
<div
style={{
textAlign: 'center',
margin: '40px',
fontSize: '26px'
}}
>
{t('togglegraph:Onnx2PaddleTitle')}
</div>
<Form layout={formLayout} form={form} initialValues={{layout: formLayout}} style={{maxWidth: 600}}>
<Form.Item
label={t('togglegraph:model')}
name="model"
rules={[{required: true, message: t('isRequire')}]}
>
<Input type="file" id="upload_onnx_model_file" accept=".onnx" />
</Form.Item>
<Form.Item
name="convertToLite"
label={t('togglegraph:convert_to_lite')}
rules={[{required: true, message: t('isRequire')}]}
initialValue="no"
>
<Radio.Group>
<Radio value="yes">{t('togglegraph:isYes')}</Radio>
<Radio value="no">{t('togglegraph:isNo')}</Radio>
</Radio.Group>
</Form.Item>
<Form.Item
label={t('togglegraph:lite_valid_places')}
name="liteValidPlaces"
rules={[{required: false}]}
initialValue="arm"
>
<Select placeholder="Please select a lite place">
{LiteBackend.map((item: string) => {
return (
<Option value={item} key={item}>
{item}
</Option>
);
})}
</Select>
</Form.Item>
<Form.Item
label={t('togglegraph:lite_model_type')}
name="liteModelType"
rules={[{required: false}]}
initialValue="naive_buffer"
>
<Select placeholder="Please select a lite model type">
{lite_model_type.map((item: string) => {
return (
<Option value={item} key={item}>
{item}
</Option>
);
})}
</Select>
</Form.Item>
{/* <Form.Item>
<Button type="primary">Submit</Button>
</Form.Item> */}
</Form>
<div
style={{
textAlign: 'center'
}}
>
<Buttons
onClick={() => {
setConvertProgress(0);
setconvertProcessFlag(true);
submodel();
}}
>
{t('Conversion')}
</Buttons>
{convertProcessFlag ? <Progress type="circle" percent={convertProcess} /> : null}
{convertProcessFlag ? <h1> {t('togglegraph:converting')} </h1> : null}
</div>
</div>
);
}
/* eslint-disable react-hooks/rules-of-hooks */
/* eslint-disable prettier/prettier */
import React, {useState} from 'react';
import {Form, Input, Radio, Select} from 'antd';
import {UploadOutlined} from '@ant-design/icons';
import type {UploadProps} from 'antd';
import Buttons from '~/components/Button';
import {message, Upload, Button} from 'antd';
import {fetcher, axios_fetcher} from '~/utils/fetch';
import {useTranslation} from 'react-i18next';
import {Progress, Space} from 'antd';
const {Option} = Select;
export default function xpaddleUploader(props: any) {
const [form] = Form.useForm();
const formLayout: any = {labelCol: {span: 4}, wrapperCol: {span: 14}};
const {t} = useTranslation(['togglegraph', 'common']);
const [convertProcess, setConvertProgress] = useState(0);
const [convertProcessFlag, setconvertProcessFlag] = useState(false);
const Uploadprops: UploadProps = {
name: 'file',
action: '',
headers: {
authorization: 'authorization-text'
},
onChange(info) {
// debugger;
if (info.file.status !== 'uploading') {
console.log(info.file, info.fileList);
}
if (info.file.status === 'done') {
message.success(`${info.file.name} file uploaded successfully`);
} else if (info.file.status === 'error') {
message.error(`${info.file.name} file upload failed.`);
}
}
};
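// Note: despite the reused variable names, in this component LiteBackend holds the selectable ONNX opset versions and lite_model_type holds the deploy backend choices.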
const LiteBackend = [7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
const lite_model_type = ['onnxruntime', 'tensorrt', 'others'];
const base64UrlToFile = (base64Url: any, filename: any) => {
// const arr = base64Url.split(',');
// const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(base64Url);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n--) {
u8arr[n] = bstr.charCodeAt(n);
}
return new File([u8arr], filename);
};
const submodel = async () => {
props.changeLoading(true);
const values = await form.validateFields();
// debugger;
const formData = new FormData();
const model_file_component = document.getElementById('upload_pd_model_file') as HTMLInputElement;
const model_file = model_file_component!.files![0];
const param_file_component = document.getElementById('upload_pd_param_file') as HTMLInputElement;
const param_file = param_file_component!.files![0];
formData.append('model', model_file);
formData.append('param', param_file);
formData.append('opset_version', values['opset_version']);
formData.append('deploy_backend', values['deployBackend']);
axios_fetcher(
`/inference/paddle2onnx/convert`,
{
method: 'POST',
body: formData
},
{
onDownloadProgress: function (axiosProgressEvent: any) {
setConvertProgress(Math.round(axiosProgressEvent.progress! * 100));
setconvertProcessFlag(true);
}
}
)
.then(
(res: any) => {
const files2 = base64UrlToFile(res.model, 'model.onnx');
props.setFiles([model_file]);
props.changeFiles2([files2]);
const current_date = new Date();
const filename = `${current_date.getFullYear()}_${current_date.getMonth()}_${current_date.getDay()}_${current_date.getHours()}_${current_date.getMinutes()}_${current_date.getSeconds()}_onnxmodel.onnx`;
props.downloadEvent(res['request_id'], filename);
},
res => {
// debugger;
props.changeLoading(false);
console.log(res);
}
)
.finally(() => {
setconvertProcessFlag(false);
});
};
return (
<div>
<div
style={{
textAlign: 'center',
margin: '40px',
fontSize: '26px'
}}
>
{t('togglegraph:Paddle2OnnxTitle')}
</div>
<Form
// {...formItemLayout}
layout={formLayout}
form={form}
initialValues={{layout: formLayout}}
style={{maxWidth: 600}}
>
<Form.Item
label={t('togglegraph:pdmodels')}
name="model"
rules={[{required: true, message: t('isRequire')}]}
>
<Input type="file" id="upload_pd_model_file" accept=".pdmodel" />
</Form.Item>
<Form.Item
label={t('togglegraph:pdiparams')}
name="param"
rules={[{required: true, message: t('isRequire')}]}
>
<Input type="file" id="upload_pd_param_file" accept=".pdiparams" />
</Form.Item>
<Form.Item
label={t('togglegraph:opset_version')}
name="opset_version"
rules={[{required: false}]}
initialValue="11"
>
<Select placeholder="Please select a version">
{LiteBackend.map((item: number) => {
return (
<Option value={item} key={item}>
{item}
</Option>
);
})}
</Select>
</Form.Item>
<Form.Item
label={t('togglegraph:deploy_backend')}
name="deployBackend"
rules={[{required: false}]}
initialValue="onnxruntime"
>
<Select placeholder="Please select a version">
{lite_model_type.map((item: string) => {
return (
<Option value={item} key={item}>
{item}
</Option>
);
})}
</Select>
</Form.Item>
</Form>
<div
style={{
textAlign: 'center'
}}
>
<Buttons
onClick={() => {
setConvertProgress(0);
setconvertProcessFlag(true);
submodel();
}}
>
{t('togglegraph:Conversion')}
</Buttons>
{convertProcessFlag ? (
<Progress type="circle" className="processCircle" percent={convertProcess} />
) : null}
{convertProcessFlag ? <h1> {t('togglegraph:converting')} </h1> : null}
</div>
</div>
);
}
.progressCircle{
align-items: center;
}
\ No newline at end of file
@@ -27,7 +27,7 @@ import {useDispatch} from 'react-redux';
import type {BlobResponse} from '~/utils/fetch';
import Button from '~/components/Button';
import Checkbox from '~/components/Checkbox';
- import Content from '~/components/Content';
+ import Content from '~/components/ContentXpaddle';
import Field from '~/components/Field';
import HashLoader from 'react-spinners/HashLoader';
import ModelPropertiesDialog from '~/components/GraphPage/ModelPropertiesDialog';
@@ -110,6 +110,7 @@ const Graph = React.forwardRef<pageRef, GraphProps>(({changeRendered, show = tru
const {loading} = useRequest<BlobResponse>(files ? null : '/graph/graph');
const setModelFile = useCallback(
(f: FileList | File[]) => {
+ // debugger;
storeDispatch(actions.graph.setModel(f));
setFiles(f);
},
@@ -224,7 +225,6 @@ const Graph = React.forwardRef<pageRef, GraphProps>(({changeRendered, show = tru
</Aside>
);
}
- console.log('nodeData && renderedflag3', nodeData, renderedflag3);
if (nodeData && renderedflag3) {
return (
@@ -290,6 +290,7 @@ const Graph = React.forwardRef<pageRef, GraphProps>(({changeRendered, show = tru
</RadioGroup>
</Field>
</AsideSection>
<AsideSection>
<Field label={t('graph:export-file')}>
<ExportButtonWrapper>
@@ -332,12 +333,13 @@ const Graph = React.forwardRef<pageRef, GraphProps>(({changeRendered, show = tru
() => <Uploader onClickUpload={onClickFile} onDropFiles={setModelFile} />,
[onClickFile, setModelFile]
);
+ // const flags = false;
+ const flags = files && show;
return (
<>
<Title>{t('common:graph')}</Title>
<ModelPropertiesDialog data={modelData} onClose={() => setModelData(null)} />
- <Content aside={aside}>
+ <Content show={show} aside={flags ? aside : null}>
{loading ? (
<Loading>
<HashLoader size="60px" color={primaryColor} />
@@ -17,17 +17,19 @@
import Aside, {AsideSection} from '~/components/Aside';
import type {Documentation, OpenedResult, Properties, SearchItem, SearchResult} from '~/resource/graph/types';
- import GraphComponent, {GraphRef} from '~/components/GraphPage/GraphStatic';
+ import GraphComponent, {GraphRef} from '~/components/GraphPage/GraphStatic3';
import React, {useImperativeHandle, useCallback, useEffect, useMemo, useRef, useState} from 'react';
import Select, {SelectProps} from '~/components/Select';
import {actions} from '~/store';
import {primaryColor, rem, size} from '~/utils/style';
import {useDispatch} from 'react-redux';
+ import XpaddleUploader from '~/components/Onnx2PaddleUpload';
+ import Paddle2OnnxUpload from '~/components/Paddle2OnnxUpload';
import type {BlobResponse} from '~/utils/fetch';
- import Button from '~/components/Button';
+ import Buttons from '~/components/Button';
import Checkbox from '~/components/Checkbox';
- import Content from '~/components/Content';
+ import Content from '~/components/ContentXpaddle';
import Field from '~/components/Field';
import HashLoader from 'react-spinners/HashLoader';
import ModelPropertiesDialog from '~/components/GraphPage/ModelPropertiesDialog';
@@ -42,7 +44,7 @@ import styled from 'styled-components';
import useRequest from '~/hooks/useRequest';
import {useTranslation} from 'react-i18next';
- const FullWidthButton = styled(Button)`
+ const FullWidthButton = styled(Buttons)`
width: 100%;
`;
@@ -87,15 +89,35 @@ const Loading = styled.div`
`;
type GraphProps = {
changeName: (name: string) => void;
+ changeFlags: (flag: boolean) => void;
+ changeFiles2?: (file: any) => void;
show?: boolean;
changeshowdata?: () => void;
+ downloadEvent?: (baseId: number, fileName: string) => void;
+ changeLoading?: (value: any) => void;
Xpaddlae?: boolean;
+ ModelValue?: number;
};
type pageRef = {
files: FileList | File[] | null;
+ setnewfiles: () => void;
setNodeDocumentations: () => void;
};
- const Graph = React.forwardRef<pageRef, GraphProps>(({changeName, changeshowdata, Xpaddlae, show = true}, ref) => {
+ const Graph = React.forwardRef<pageRef, GraphProps>(
+ (
+ {
+ changeName,
+ changeshowdata,
+ Xpaddlae,
+ show = true,
+ changeFlags,
+ changeLoading,
+ changeFiles2,
+ downloadEvent,
+ ModelValue
+ },
+ ref
+ ) => {
const {t} = useTranslation(['graph', 'common']);
const storeDispatch = useDispatch();
@@ -110,10 +132,16 @@ const Graph = React.forwardRef<pageRef, GraphProps>(({changeName, changeshowdata
const name = f[0].name.substring(f[0].name.lastIndexOf('.') + 1);
changeName && changeName(name);
setFiles(f);
+ changeFlags(true);
changeshowdata && changeshowdata();
},
[storeDispatch]
);
+ const newsetfiles = (f: FileList | File[]) => {
+ // changeFlags(false);
+ setRendered(false);
+ setFiles(f);
+ };
const onClickFile = useCallback(() => {
if (file.current) {
file.current.value = '';
@@ -171,7 +199,7 @@
const [nodeData, setNodeData] = useState<Properties | null>(null);
const [nodeDocumentation, setNodeDocumentation] = useState<Documentation | null>(null);
const [renderedflag3, setRenderedflag3] = useState(true);
+ const [rendered, setRendered] = useState(false);
useEffect(() => {
setSearch('');
setSearchResult({text: '', result: []});
@@ -184,6 +212,23 @@
setNodeData(null);
}
}, [show]);
+ useEffect(() => {
+ setFiles(undefined);
+ }, [ModelValue]);
+ // useEffect(() => {
+ // if (nodeData && renderedflag3) {
+ // debugger;
+ // changeFlags(false);
+ // }
+ // }, [nodeData, renderedflag3]);
+ useEffect(() => {
+ if (rendered) {
+ // debugger;
+ // if ()
+ changeFlags(true);
+ changeLoading && changeLoading(false);
+ }
+ }, [rendered]);
const bottom = useMemo(
() =>
searching ? null : (
@@ -194,9 +239,12 @@
[t, onClickFile, searching]
);
- const [rendered, setRendered] = useState(false);
useImperativeHandle(ref, () => ({
files,
+ setnewfiles: () => {
+ // debugger;
+ setFiles(undefined);
+ },
setNodeDocumentations: () => {
setRenderedflag3(false);
}
@@ -208,7 +256,13 @@
if (nodeDocumentation) {
return (
<Aside width={rem(360)}>
- <NodeDocumentationSidebar data={nodeDocumentation} onClose={() => setNodeDocumentation(null)} />
+ <NodeDocumentationSidebar
+ data={nodeDocumentation}
+ onClose={() => {
+ changeFlags(true);
+ setNodeDocumentation(null);
+ }}
+ />
</Aside>
);
}
@@ -218,14 +272,19 @@
<Aside width={rem(360)}>
<NodePropertiesSidebar
data={nodeData}
- onClose={() => setNodeData(null)}
+ onClose={() => {
+ changeFlags(true);
+ setNodeData(null);
+ }}
showNodeDocumentation={() => graph.current?.showNodeDocumentation(nodeData)}
/>
</Aside>
);
}
return (
- <Aside bottom={bottom}>
+ // <Aside bottom={bottom}>
+ <Aside>
<SearchSection>
<Search
text={search}
@@ -246,7 +305,11 @@
{modelGraphs.length > 1 && (
<AsideSection>
<Field label={t('graph:subgraph')}>
- <FullWidthSelect list={modelGraphs} value={selectedGraph} onChange={changeGraph} />
+ <FullWidthSelect
+ list={modelGraphs}
+ value={selectedGraph}
+ onChange={changeGraph}
+ />
</Field>
</AsideSection>
)}
@@ -280,12 +343,12 @@
<AsideSection>
<Field label={t('graph:export-file')}>
<ExportButtonWrapper>
- <Button onClick={() => graph.current?.export('png')}>
+ <Buttons onClick={() => graph.current?.export('png')}>
{t('graph:export-png')}
- </Button>
+ </Buttons>
- <Button onClick={() => graph.current?.export('svg')}>
+ <Buttons onClick={() => graph.current?.export('svg')}>
{t('graph:export-svg')}
- </Button>
+ </Buttons>
</ExportButtonWrapper>
</Field>
</AsideSection>
@@ -314,15 +377,41 @@
nodeDocumentation,
renderedflag3
]);
- const uploader = useMemo(
- () => <Uploader onClickUpload={onClickFile} onDropFiles={setModelFile} Xpaddlae={Xpaddlae} />,
- [onClickFile, setModelFile]
- );
+ // const uploader = useMemo(
+ // () => <Uploader onClickUpload={onClickFile} onDropFiles={setModelFile} Xpaddlae={Xpaddlae} />,
+ // [onClickFile, setModelFile]
+ // );
+ // const buttonItemLayout = formLayout === 'horizontal' ? {wrapperCol: {span: 14, offset: 4}} : null;
+ const uploader = useMemo(() => {
+ if (ModelValue === 1) {
+ return (
+ <Paddle2OnnxUpload
+ changeLoading={changeLoading}
+ downloadEvent={downloadEvent}
+ setFiles={newsetfiles}
+ changeFiles2={changeFiles2}
+ ></Paddle2OnnxUpload>
+ );
+ } else {
+ return (
+ <XpaddleUploader
+ changeLoading={changeLoading}
+ downloadEvent={downloadEvent}
+ setFiles={newsetfiles}
+ changeFiles2={changeFiles2}
+ ></XpaddleUploader>
+ );
+ }
+ }, [ModelValue]);
+ const flags = files && show;
+ console.log('flags', flags, aside);
+ const nodeShows = (nodeData && renderedflag3) || nodeDocumentation;
+ const nodeShow = nodeShows ? true : false;
return (
<>
<Title>{t('common:graph')}</Title>
<ModelPropertiesDialog data={modelData} onClose={() => setModelData(null)} />
- <Content aside={aside}>
+ <Content show={show} nodeShow={nodeShow} aside={flags ? aside : null}>
{loading ? (
<Loading>
<HashLoader size="60px" color={primaryColor} />
@@ -336,7 +425,7 @@
showInitializers={showInitializers}
showNames={showNames}
horizontal={horizontal}
- onRendered={() => setRendered(true)}
+ onRendered={flag => setRendered(flag)}
onOpened={setOpenedModel}
onSearch={data => {
setSearchResult(data);
@@ -361,6 +450,7 @@
</Content>
</>
);
- });
+ }
+ );
export default Graph;
@@ -18,8 +18,10 @@ import type {TFunction} from 'i18next';
import i18next from 'i18next';
import queryString from 'query-string';
import {toast} from 'react-toastify';
+ import axios from 'axios';
const API_TOKEN_KEY: string = import.meta.env.SNOWPACK_PUBLIC_API_TOKEN_KEY;
- const API_URL: string = import.meta.env.SNOWPACK_PUBLIC_API_URL;
+ export const API_URL: string = import.meta.env.SNOWPACK_PUBLIC_API_URL;
console.log('API_URL', API_TOKEN_KEY);
const API_TOKEN_HEADER = 'X-VisualDL-Instance-ID';
@@ -89,7 +91,6 @@ export async function fetcher<T = unknown>(url: string, options?: RequestInit):
// res = await fetch('http://10.181.196.14:8040/app/api/deploy/convert?format=onnx', addApiToken(options));
res = await fetch(API_URL + url, addApiToken(options));
- console.log('ressponse', res);
} catch (e) {
const t = await logErrorAndReturnT(e);
throw new Error(t('errors:network-error'));
@@ -131,7 +132,6 @@ export async function fetcher<T = unknown>(url: string, options?: RequestInit):
} else {
let data: Blob;
try {
- console.log('datas', res);
data = await res.blob();
} catch (e) {
const t = await logErrorAndReturnT(e);
@@ -140,7 +140,6 @@ export async function fetcher<T = unknown>(url: string, options?: RequestInit):
const disposition = res.headers.get('Content-Disposition');
// support safari
if (!data.arrayBuffer) {
- console.log('arrayBuffer', data);
data.arrayBuffer = async () =>
new Promise<ArrayBuffer>((resolve, reject) => {
const fileReader = new FileReader();
@@ -150,7 +149,6 @@ export async function fetcher<T = unknown>(url: string, options?: RequestInit):
fileReader.readAsArrayBuffer(data);
});
}
- console.log('datas', data);
let filename: string | null = null;
if (disposition && disposition.indexOf('attachment') !== -1) {
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(disposition);
@@ -162,6 +160,63 @@ export async function fetcher<T = unknown>(url: string, options?: RequestInit):
}
}
+ export async function axios_fetcher<T = unknown>(url: string, options?: RequestInit, config?: object): Promise<BlobResponse | string | T> {
+ let res: any;
+ try {
+ if (options!.method==="POST"){
+ res = await axios.post(API_URL + url, options!.body, config);
+ } else if(options!.method==="GET"){
+ res = await axios.get(API_URL + url, config);
+ }else{
+ res = await axios(API_URL + url);
+ }
+ } catch (e) {
+ const t = await logErrorAndReturnT(e);
+ throw new Error(t('errors:network-error'));
+ }
+ const contentType = res.headers.get('content-type') ?? '';
+ if (contentType.includes('application/json')) {
+ let response: Data<T> | T;
+ try {
+ response = res.data;
+ } catch (e) {
+ const t = await logErrorAndReturnT(e);
+ throw new Error(t('errors:parse-error'));
+ }
+ if (response && 'status' in response) {
+ if (response.status !== 0) {
+ const t = await logErrorAndReturnT(response);
+ toast.error((response as ErrorData).msg);
+ throw new Error((response as ErrorData).msg || t('errors:error'));
+ } else {
+ return (response as SuccessData<T>).data;
+ }
+ }
+ return response;
+ } else if (contentType.startsWith('text/')) {
+ let response: string;
+ try {
+ response = res.data;
+ } catch (e) {
+ const t = await logErrorAndReturnT(e);
+ throw new Error(t('errors:parse-error'));
+ }
+ return response;
+ } else {
+ let data: any;
+ data = res.data;
+ let filename: string | null = null;
+ const disposition = res.headers.get('Content-Disposition');
+ if (disposition && disposition.indexOf('attachment') !== -1) {
+ const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(disposition);
+ if (matches != null && matches[1]) {
+ filename = matches[1].replace(/['"]/g, '');
+ }
+ }
+ return {data, type: res.headers.get('Content-Type'), filename};
+ }
+ }
export const cycleFetcher = async <T = unknown>(urls: string[], options?: RequestInit): Promise<T[]> => {
return await Promise.all(urls.map(url => fetcher<T>(url, options)));
};
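Design note on the helper above: the fetch-based fetcher in this file has no progress callback, so the new conversion requests go through axios, whose request config (the third argument) is passed straight through and can carry onDownloadProgress; that is what feeds the antd Progress circles in the uploader components. A trimmed, hypothetical call shape, assuming a FormData payload like the ones built in the uploaders:

import {axios_fetcher} from '~/utils/fetch';

// Assumed usage sketch, mirroring how the uploader components call the helper.
const formData = new FormData();
axios_fetcher('/inference/paddle2onnx/convert', {method: 'POST', body: formData}, {
    onDownloadProgress: (event: any) => {
        // `progress` is a 0..1 fraction when the response size is known; the uploaders use it the same way.
        const percent = Math.round((event.progress ?? 0) * 100);
        console.log(`convert response ${percent}% downloaded`);
    }
}).then(res => console.log('converted model payload', res));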
/**
* Copyright 2020 Baidu Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// cSpell:disable
import graph from '../../../assets/graph/yolov3.cfg';
// import onnx from '../../../assets/graph/mobilenetv2-7-0.onnx';
export default async () => {
const result = await fetch(graph);
// const result = await fetch(onnx);
console.log('result', result);
return new Response(await result.arrayBuffer(), {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Disposition': 'attachment; filename="yolov3.cfg"'
}
});
};
/**
* Copyright 2020 Baidu Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// cSpell:disable
import graph from '../../../assets/graph/yolov3.cfg';
// import onnx from '../../../assets/graph/mobilenetv2-7-0.onnx';
export default async () => {
const result = await fetch(graph);
// const result = await fetch(onnx);
console.log('result', result);
return new Response(await result.arrayBuffer(), {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Disposition': 'attachment; filename="yolov3.cfg"'
}
});
};
@@ -20,6 +20,7 @@ import os
import shutil
import tempfile
+ import paddle
import paddle2onnx
from flask import request
from x2paddle.convert import onnx2paddle
@@ -85,17 +86,7 @@ class ModelConvertApi(object):
hl.update(data)
identity = hl.hexdigest()
result['request_id'] = identity
- # check whether model has been transfromed before
- # if model has been transformed before, data is stored at bos
- pdmodel_filename = 'bos://{}/onnx2paddle/{}/model.pdmodel'.format(
- self.bucket_name, identity)
- if self.bos_client.exists(pdmodel_filename):
- remote_data = self.bos_client.read_file(pdmodel_filename)
- if remote_data: # we should check data is not empty,
- # in case convertion failed but empty data is still uploaded before due to unknown reasons
- model_encoded = base64.b64encode(remote_data).decode('utf-8')
- result['model'] = model_encoded
- return result
target_path = os.path.join(X2PADDLE_CACHE_PATH, 'onnx2paddle',
identity)
if not os.path.exists(target_path):
@@ -111,9 +102,13 @@ class ModelConvertApi(object):
)
try:
if convert_to_lite is False:
+ with paddle.fluid.dygraph.guard():
onnx2paddle(
- fp.name, target_path, convert_to_lite=convert_to_lite)
+ fp.name,
+ target_path,
+ convert_to_lite=convert_to_lite)
else:
+ with paddle.fluid.dygraph.guard():
onnx2paddle(
fp.name,
target_path,
@@ -138,7 +133,7 @@ class ModelConvertApi(object):
filename = 'bos://{}/onnx2paddle/{}.tar'.format(
self.bucket_name, identity)
try:
- self.bos_client.write(filename, data)
+ self.bos_client.write(filename, data, append=False)
except Exception as e:
print(
"Exception: Write file {}.tar to bos failed, due to {}"
@@ -161,6 +156,7 @@ class ModelConvertApi(object):
# delete target_path
shutil.rmtree(target_path)
result['model'] = model_encoded
+ print(len(model_encoded))
return result
@result('application/octet-stream')
@@ -177,6 +173,7 @@ class ModelConvertApi(object):
raise RuntimeError(
"The requested model can not be downloaded due to not existing or convertion failed."
)
+ print(len(data))
return data
@result()
@@ -194,6 +191,7 @@ class ModelConvertApi(object):
opset_version = int(opset_version)
except Exception:
opset_version = 11
if deploy_backend not in ['onnxruntime', 'tensorrt', 'others']:
deploy_backend = 'onnxruntime'
@@ -202,17 +200,6 @@ class ModelConvertApi(object):
hl.update(model_data + param_data)
identity = hl.hexdigest()
result['request_id'] = identity
- # check whether model has been transfromed before
- # if model has been transformed before, data is stored at bos
- model_filename = 'bos://{}/paddle2onnx/{}/model.onnx'.format(
- self.bucket_name, identity)
- if self.bos_client.exists(model_filename):
- remote_data = self.bos_client.read_file(model_filename)
- if remote_data: # we should check data is not empty,
- # in case convertion failed but empty data is still uploaded before due to unknown reasons
- model_encoded = base64.b64encode(remote_data).decode('utf-8')
- result['model'] = model_encoded
- return result
with tempfile.NamedTemporaryFile() as model_fp:
with tempfile.NamedTemporaryFile() as param_fp:
@@ -242,7 +229,8 @@ class ModelConvertApi(object):
model_encoded = None
if onnx_model:
try:
- self.bos_client.write(filename, onnx_model)
+ self.bos_client.write(
+ filename, onnx_model, append=False)
except Exception as e:
print(
"Exception: Write file {}/model.onnx to bos failed, due to {}"
@@ -250,6 +238,7 @@ class ModelConvertApi(object):
model_encoded = base64.b64encode(onnx_model).decode(
'utf-8')
result['model'] = model_encoded
+ print(len(model_encoded))
return result
@result('application/octet-stream')
@@ -266,6 +255,7 @@ class ModelConvertApi(object):
raise RuntimeError(
"The requested model can not be downloaded due to not existing or convertion failed."
)
+ print(len(data))
return data
@@ -471,16 +471,30 @@ class BosFileSystem(object):
self._file_contents_count = 0
self._start_append_time = time.time()
- def write(self, filename, file_content, binary_mode=False):
+ def write(self, filename, file_content, binary_mode=False, append=True):
+ if append:
self.append(filename, file_content, binary_mode=False)
+ else:
- # bucket_name, object_key = BosFileSystem._get_object_info(filename)
- #
- # self.bos_client.append_object(bucket_name=bucket_name,
- # key=object_key,
- # data=file_content,
- # content_md5=content_md5(file_content),
- # content_length=len(file_content))
+ bucket_name, object_key = get_object_info(filename)
+ try:
+ self.bos_client.put_object(
+ bucket_name=bucket_name,
+ key=object_key,
+ data=file_content,
+ content_length=len(file_content),
+ content_md5=content_md5(file_content))
+ except (exception.BceServerError,
+ exception.BceHttpClientError) as e:  # sts token invalid
+ if bucket_name == 'visualdl-server':  # only sts token from visualdl-server, we can renew automatically
+ self.renew_bos_client_from_server()
+ self.bos_client.put_object(
+ bucket_name=bucket_name,
+ key=object_key,
+ data=file_content,
+ content_length=len(file_content),
+ content_md5=content_md5(file_content))
+ else:
+ raise e  # user defined bos token, we have no idea to renew the token, so throw the exception
def walk(self, dir):
class WalkGenerator():