未验证 提交 e33f6848 编写于 作者: P Peter Pan 提交者: GitHub

NaN values no longer break charts (#984)

* chore: update dependencies

* fix(scalar): prevent NaN values from breaking charts
上级 50fab2de
...@@ -31,11 +31,13 @@ ...@@ -31,11 +31,13 @@
"build": "./scripts/build.sh", "build": "./scripts/build.sh",
"build:core": "yarn workspace @visualdl/core build", "build:core": "yarn workspace @visualdl/core build",
"build:demo": "yarn workspace @visualdl/demo build", "build:demo": "yarn workspace @visualdl/demo build",
"build:wasm": "yarn workspace @visualdl/wasm build",
"clean": "rimraf output packages/*/dist packages/wasm/target", "clean": "rimraf output packages/*/dist packages/wasm/target",
"dev": "yarn dev:core", "dev": "yarn dev:core",
"dev:core": "yarn workspace @visualdl/core dev", "dev:core": "yarn workspace @visualdl/core dev",
"dev:demo": "yarn workspace @visualdl/server dev:demo", "dev:demo": "yarn workspace @visualdl/server dev:demo",
"dev:server": "yarn workspace @visualdl/server dev", "dev:server": "yarn workspace @visualdl/server dev",
"dev:wasm": "yarn workspace @visualdl/wasm dev",
"lint": "eslint --ext .tsx,.jsx.ts,.js,.mjs .", "lint": "eslint --ext .tsx,.jsx.ts,.js,.mjs .",
"format": "prettier --write \"**/*.{ts,tsx,js,jsx}\"", "format": "prettier --write \"**/*.{ts,tsx,js,jsx}\"",
"test": "yarn workspaces run test", "test": "yarn workspaces run test",
...@@ -44,8 +46,8 @@ ...@@ -44,8 +46,8 @@
"version": "yarn format && git add -A" "version": "yarn format && git add -A"
}, },
"devDependencies": { "devDependencies": {
"@typescript-eslint/eslint-plugin": "4.26.0", "@typescript-eslint/eslint-plugin": "4.26.1",
"@typescript-eslint/parser": "4.26.0", "@typescript-eslint/parser": "4.26.1",
"eslint": "7.28.0", "eslint": "7.28.0",
"eslint-config-prettier": "8.3.0", "eslint-config-prettier": "8.3.0",
"eslint-plugin-license-header": "0.2.0", "eslint-plugin-license-header": "0.2.0",
......
...@@ -95,30 +95,30 @@ ...@@ -95,30 +95,30 @@
"@types/chai": "4.2.18", "@types/chai": "4.2.18",
"@types/d3": "6.7.0", "@types/d3": "6.7.0",
"@types/d3-format": "2.0.0", "@types/d3-format": "2.0.0",
"@types/echarts": "4.9.7", "@types/echarts": "4.9.8",
"@types/file-saver": "2.0.2", "@types/file-saver": "2.0.2",
"@types/lodash": "4.14.170", "@types/lodash": "4.14.170",
"@types/mime-types": "2.1.0", "@types/mime-types": "2.1.0",
"@types/nprogress": "0.2.0", "@types/nprogress": "0.2.0",
"@types/numeric": "1.2.1", "@types/numeric": "1.2.1",
"@types/react": "17.0.9", "@types/react": "17.0.10",
"@types/react-dom": "17.0.6", "@types/react-dom": "17.0.7",
"@types/react-helmet": "6.1.1", "@types/react-helmet": "6.1.1",
"@types/react-rangeslider": "2.2.3", "@types/react-rangeslider": "2.2.3",
"@types/react-redux": "7.1.16", "@types/react-redux": "7.1.16",
"@types/react-router-dom": "5.1.7", "@types/react-router-dom": "5.1.7",
"@types/react-table": "7.7.1", "@types/react-table": "7.7.1",
"@types/snowpack-env": "2.3.3", "@types/snowpack-env": "2.3.3",
"@types/styled-components": "5.1.9", "@types/styled-components": "5.1.10",
"@types/three": "0.129.1", "@types/three": "0.129.1",
"@visualdl/mock": "2.2.0-1", "@visualdl/mock": "2.2.0-1",
"@web/test-runner": "0.13.5", "@web/test-runner": "0.13.6",
"chai": "4.3.4", "chai": "4.3.4",
"chalk": "4.1.1", "chalk": "4.1.1",
"dotenv": "10.0.0", "dotenv": "10.0.0",
"enhanced-resolve": "5.8.2", "enhanced-resolve": "5.8.2",
"html-minifier": "4.0.0", "html-minifier": "4.0.0",
"snowpack": "3.5.5", "snowpack": "3.5.6",
"snowpack-plugin-copy": "1.0.1", "snowpack-plugin-copy": "1.0.1",
"typescript": "4.3.2" "typescript": "4.3.2"
}, },
......
...@@ -85,7 +85,14 @@ const ScalarChart: FunctionComponent<ScalarChartProps> = ({ ...@@ -85,7 +85,14 @@ const ScalarChart: FunctionComponent<ScalarChartProps> = ({
const xAxisType = useMemo(() => (xAxis === XAxis.WallTime ? XAxisType.time : XAxisType.value), [xAxis]); const xAxisType = useMemo(() => (xAxis === XAxis.WallTime ? XAxisType.time : XAxisType.value), [xAxis]);
const transformParams = useMemo(() => [datasets?.map(data => data ?? []) ?? [], smoothing], [datasets, smoothing]); const transformParams = useMemo(
() => [
datasets?.map(data => data?.map(row => [row[0], row[1], Number.isFinite(row[2]) ? row[2] : null]) ?? []) ??
[],
smoothing
],
[datasets, smoothing]
);
const {data: smoothedDatasetsOrUndefined} = useWebAssembly<Dataset[]>('scalar_transform', transformParams); const {data: smoothedDatasetsOrUndefined} = useWebAssembly<Dataset[]>('scalar_transform', transformParams);
const smoothedDatasets = useMemo<NonNullable<typeof smoothedDatasetsOrUndefined>>( const smoothedDatasets = useMemo<NonNullable<typeof smoothedDatasetsOrUndefined>>(
() => smoothedDatasetsOrUndefined ?? [], () => smoothedDatasetsOrUndefined ?? [],
......
...@@ -158,7 +158,7 @@ export const tooltip = (data: TooltipData[], stepLength: number, i18n: typeof I1 ...@@ -158,7 +158,7 @@ export const tooltip = (data: TooltipData[], stepLength: number, i18n: typeof I1
], ],
data: data.map(({min, max, item}) => [ data: data.map(({min, max, item}) => [
valueFormatter(item[3] ?? Number.NaN), valueFormatter(item[3] ?? Number.NaN),
valueFormatter(item[2] ?? Number.NaN), valueFormatter(Number.isFinite(item[2]) ? (item[2] as number) : Number.NaN),
item[1], item[1],
valueFormatter(min ?? Number.NaN), valueFormatter(min ?? Number.NaN),
valueFormatter(max ?? Number.NaN), valueFormatter(max ?? Number.NaN),
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
// cSpell:words quantile accum debias exponentiated // cSpell:words quantile accum debias exponentiated
import type {Dataset, ScalarDataset} from './types'; import type {Dataset, ScalarDataset, Value} from './types';
import BigNumber from 'bignumber.js'; import BigNumber from 'bignumber.js';
import type {Run} from '~/types'; import type {Run} from '~/types';
...@@ -34,34 +34,38 @@ export const transform = ({datasets, smoothing}: {datasets: ScalarDataset[]; smo ...@@ -34,34 +34,38 @@ export const transform = ({datasets, smoothing}: {datasets: ScalarDataset[]; smo
let startValue = 0; let startValue = 0;
const bigSmoothing = new BigNumber(smoothing); const bigSmoothing = new BigNumber(smoothing);
data.forEach((d, i) => { data.forEach((d, i) => {
const nextVal = new BigNumber(d[2]);
const millisecond = (d[0] = Math.floor(d[0])); const millisecond = (d[0] = Math.floor(d[0]));
if (i === 0) { if (i === 0) {
startValue = millisecond; startValue = millisecond;
} }
// relative time in millisecond. // relative time in millisecond.
d[4] = Math.floor(millisecond - startValue); d[4] = Math.floor(millisecond - startValue);
if (!nextVal.isFinite()) { if (!Number.isFinite(d[2])) {
d[3] = nextVal.toNumber(); d[3] = null;
} else { } else {
// last = last * smoothing + (1 - smoothing) * nextVal; const nextVal = new BigNumber(d[2] as number);
last = last.multipliedBy(bigSmoothing).plus(bigSmoothing.minus(1).negated().multipliedBy(nextVal)); if (!nextVal.isFinite()) {
numAccum++; d[3] = nextVal.toNumber();
let debiasWeight = new BigNumber(1); } else {
if (!bigSmoothing.isEqualTo(1)) { // last = last * smoothing + (1 - smoothing) * nextVal;
//debiasWeight = 1.0 - Math.pow(smoothing, numAccum); last = last.multipliedBy(bigSmoothing).plus(bigSmoothing.minus(1).negated().multipliedBy(nextVal));
debiasWeight = bigSmoothing.exponentiatedBy(numAccum).minus(1).negated(); numAccum++;
let debiasWeight = new BigNumber(1);
if (!bigSmoothing.isEqualTo(1)) {
//debiasWeight = 1.0 - Math.pow(smoothing, numAccum);
debiasWeight = bigSmoothing.exponentiatedBy(numAccum).minus(1).negated();
}
// d[3] = last / debiasWeight;
d[3] = last.dividedBy(debiasWeight).toNumber();
} }
// d[3] = last / debiasWeight;
d[3] = last.dividedBy(debiasWeight).toNumber();
} }
}); });
return data; return data;
}); });
export const singlePointRange = (value: number) => ({ export const singlePointRange = (value: Value) => ({
min: value ? Math.min(value * 2, 0) : -0.5, min: Number.isFinite(value) ? Math.min((value as number) * 2, 0) : -0.5,
max: value ? Math.max(value * 2, 0) : 0.5 max: Number.isFinite(value) ? Math.max((value as number) * 2, 0) : 0.5
}); });
export const range = ({datasets}: {datasets: Dataset[]}) => { export const range = ({datasets}: {datasets: Dataset[]}) => {
...@@ -72,7 +76,7 @@ export const range = ({datasets}: {datasets: Dataset[]}) => { ...@@ -72,7 +76,7 @@ export const range = ({datasets}: {datasets: Dataset[]}) => {
max: Number.NaN max: Number.NaN
}; };
} }
const values = dataset.map(v => v[2]); const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
return { return {
min: Math.min(...values) ?? Number.NaN, min: Math.min(...values) ?? Number.NaN,
max: Math.max(...values) ?? Number.NaN max: Math.max(...values) ?? Number.NaN
...@@ -86,7 +90,7 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo ...@@ -86,7 +90,7 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo
if (dataset.length === 0) { if (dataset.length === 0) {
return void 0; return void 0;
} }
const values = dataset.map(v => v[2]); const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
if (!outlier) { if (!outlier) {
// Get the origin data range. // Get the origin data range.
return { return {
...@@ -95,7 +99,10 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo ...@@ -95,7 +99,10 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo
}; };
} else { } else {
// Get the quantile range. // Get the quantile range.
const sorted = dataset.map(v => v[2]).sort(); const sorted = dataset
.map(v => v[2])
.filter(Number.isFinite)
.sort() as number[];
return { return {
min: quantile(sorted, 0.05), min: quantile(sorted, 0.05),
max: quantile(values, 0.95) max: quantile(values, 0.95)
...@@ -122,10 +129,13 @@ export const nearestPoint = (data: Dataset[], runs: Run[], idx: number, value: n ...@@ -122,10 +129,13 @@ export const nearestPoint = (data: Dataset[], runs: Run[], idx: number, value: n
let d = Number.POSITIVE_INFINITY; let d = Number.POSITIVE_INFINITY;
let dv = value; let dv = value;
for (let i = 0; i < series.length; i++) { for (let i = 0; i < series.length; i++) {
const dd = Math.abs(series[i][idx] - value); const v = series[i][idx];
if (d > dd) { if (Number.isFinite(v)) {
d = dd; const dd = Math.abs((v as number) - value);
dv = series[i][idx]; if (d > dd) {
d = dd;
dv = v as number;
}
} }
} }
result.push(...series.filter(s => s[idx] === dv).map(item => ({run, item}))); result.push(...series.filter(s => s[idx] === dv).map(item => ({run, item})));
......
...@@ -31,7 +31,8 @@ export const sortingMethodMap: Record<SM, (points: TooltipData[], data: number[] ...@@ -31,7 +31,8 @@ export const sortingMethodMap: Record<SM, (points: TooltipData[], data: number[]
[SM.Descending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]).reverse(), [SM.Descending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]).reverse(),
[SM.Ascending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]), [SM.Ascending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]),
// Compare other points width the trigger point, calculate the nearest sort. // Compare other points width the trigger point, calculate the nearest sort.
[SM.Nearest]: (points: TooltipData[], data: number[]) => sortBy(points, point => point.item[3] - data[2]) [SM.Nearest]: (points: TooltipData[], data: number[]) =>
sortBy(points, point => (point.item[3] ?? Number.NaN) - data[2])
} as const; } as const;
export type {Dataset, ScalarDataset, Range, TooltipData} from './types'; export type {Dataset, ScalarDataset, Range, TooltipData} from './types';
......
...@@ -18,10 +18,11 @@ import {Run, TimeMode} from '~/types'; ...@@ -18,10 +18,11 @@ import {Run, TimeMode} from '~/types';
export type {Range} from '~/types'; export type {Range} from '~/types';
type Value = number; type InvalidValue = 'NaN' | 'Inf' | '-Inf';
export type Value = number | null | InvalidValue;
type WallTime = number; type WallTime = number;
type Step = number; type Step = number;
type Smoothed = number; type Smoothed = number | null;
type Relative = number; type Relative = number;
export type Dataset = [WallTime, Step, Value, Smoothed, Relative][]; export type Dataset = [WallTime, Step, Value, Smoothed, Relative][];
......
...@@ -35,7 +35,7 @@ ...@@ -35,7 +35,7 @@
"devDependencies": { "devDependencies": {
"@types/express": "4.17.12", "@types/express": "4.17.12",
"@types/mkdirp": "1.0.1", "@types/mkdirp": "1.0.1",
"@types/node": "15.12.1", "@types/node": "15.12.2",
"@types/node-fetch": "2.5.10", "@types/node-fetch": "2.5.10",
"@types/rimraf": "3.0.0", "@types/rimraf": "3.0.0",
"cpy-cli": "3.1.1", "cpy-cli": "3.1.1",
......
...@@ -52,7 +52,7 @@ ...@@ -52,7 +52,7 @@
"sass-loader": "12.0.0", "sass-loader": "12.0.0",
"terser": "5.7.0", "terser": "5.7.0",
"webpack": "5.38.1", "webpack": "5.38.1",
"webpack-cli": "4.7.0" "webpack-cli": "4.7.2"
}, },
"engines": { "engines": {
"node": ">=12", "node": ">=12",
......
...@@ -41,12 +41,12 @@ ...@@ -41,12 +41,12 @@
"enhanced-resolve": "5.8.2", "enhanced-resolve": "5.8.2",
"express": "4.17.1", "express": "4.17.1",
"http-proxy-middleware": "2.0.0", "http-proxy-middleware": "2.0.0",
"pm2": "4.5.6" "pm2": "5.0.4"
}, },
"devDependencies": { "devDependencies": {
"@types/enhanced-resolve": "3.0.6", "@types/enhanced-resolve": "3.0.6",
"@types/express": "4.17.12", "@types/express": "4.17.12",
"@types/node": "15.12.1", "@types/node": "15.12.2",
"@visualdl/mock": "2.2.0-1", "@visualdl/mock": "2.2.0-1",
"cross-env": "7.0.3", "cross-env": "7.0.3",
"nodemon": "2.0.7", "nodemon": "2.0.7",
......
...@@ -33,6 +33,7 @@ ...@@ -33,6 +33,7 @@
"types": "dist/index.d.ts", "types": "dist/index.d.ts",
"scripts": { "scripts": {
"build": "wasm-pack build --release --out-dir dist --out-name index --target web .", "build": "wasm-pack build --release --out-dir dist --out-name index --target web .",
"dev": "wasm-pack build --dev --out-dir dist --out-name index --target web .",
"test": "echo \"Error: no test specified\" && exit 0" "test": "echo \"Error: no test specified\" && exit 0"
}, },
"devDependencies": { "devDependencies": {
......
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct Dataset(f64, i64, f64); pub struct Dataset(f64, i64, Option<f64>);
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct Smoothed(i64, i64, f64, f64, i64); pub struct Smoothed(i64, i64, Option<f64>, Option<f64>, i64);
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct Range { pub struct Range {
...@@ -33,6 +33,13 @@ fn quantile(values: &Vec<f64>, p: f64) -> f64 { ...@@ -33,6 +33,13 @@ fn quantile(values: &Vec<f64>, p: f64) -> f64 {
return value0 + (value1 - value0) * (i - (i0 as f64)); return value0 + (value1 - value0) * (i - (i0 as f64));
} }
// Collects the present sample values (tuple field `.2`) from a smoothed
// dataset, returning them both sorted ascending and in original order.
// `None` entries (the frontend's marker for invalid values) are skipped.
fn sort_values(data: &Vec<Smoothed>) -> (Vec<f64>, Vec<f64>) {
    // filter_map drops `None` but keeps any `Some(NaN)` that slips through
    // (the JS side is supposed to map non-finite numbers to null — TODO confirm).
    let values: Vec<f64> = data.iter().filter_map(|x| x.2).collect();
    let mut sorted: Vec<f64> = values.clone();
    // `partial_cmp(..).unwrap()` would panic on NaN; treating incomparable
    // pairs as equal keeps a stray NaN from crashing the wasm module.
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    (sorted, values)
}
pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smoothed>> { pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smoothed>> {
let mut result: Vec<Vec<Smoothed>> = vec![]; let mut result: Vec<Vec<Smoothed>> = vec![];
for dataset in datasets.iter() { for dataset in datasets.iter() {
...@@ -44,8 +51,7 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth ...@@ -44,8 +51,7 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
let mut num_accum: i32 = 0; let mut num_accum: i32 = 0;
let mut start_value: i64 = 0; let mut start_value: i64 = 0;
for (i, d) in dataset.iter().enumerate() { for (i, d) in dataset.iter().enumerate() {
let mut r: Smoothed = Smoothed(0, d.1, d.2, 0.0, 0); let mut r: Smoothed = Smoothed(0, d.1, d.2, Some(0.0), 0);
let next_val: f64 = d.2;
// second to millisecond. // second to millisecond.
let millisecond: i64 = d.0.floor() as i64; let millisecond: i64 = d.0.floor() as i64;
r.0 = millisecond; r.0 = millisecond;
...@@ -54,16 +60,20 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth ...@@ -54,16 +60,20 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
} }
// Relative time in millisecond. // Relative time in millisecond.
r.4 = millisecond - start_value; r.4 = millisecond - start_value;
if next_val.is_infinite() { if let Some(next_val) = d.2 {
r.3 = next_val; if next_val.is_infinite() || next_val.is_nan() {
} else { r.3 = Some(next_val);
last = last * smoothing + (1.0 - smoothing) * next_val; } else {
num_accum += 1; last = last * smoothing + (1.0 - smoothing) * next_val;
let mut debias_weight: f64 = 1.0_f64; num_accum += 1;
if smoothing != 1.0 { let mut debias_weight: f64 = 1.0_f64;
debias_weight = (1.0_f64 - smoothing.powi(num_accum)).into(); if smoothing != 1.0 {
debias_weight = (1.0_f64 - smoothing.powi(num_accum)).into();
}
r.3 = Some(last / debias_weight);
} }
r.3 = last / debias_weight; } else {
r.3 = None;
} }
row.push(r); row.push(r);
} }
...@@ -76,17 +86,12 @@ pub fn range(datasets: &Vec<Vec<Smoothed>>) -> Vec<Range> { ...@@ -76,17 +86,12 @@ pub fn range(datasets: &Vec<Vec<Smoothed>>) -> Vec<Range> {
let mut ranges: Vec<Range> = vec![]; let mut ranges: Vec<Range> = vec![];
for data in datasets.iter() { for data in datasets.iter() {
let n: usize = data.len(); if data.len() == 0 {
if n == 0 {
ranges.push(Range::new(f64::NAN, f64::NAN)); ranges.push(Range::new(f64::NAN, f64::NAN));
} }
let values: Vec<f64> = data.iter().map(|x| x.2).collect(); let (sorted, _) = sort_values(data);
let mut sorted: Vec<f64> = values.clone(); ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
ranges.push(Range::new(sorted[0], sorted[n - 1]));
} }
return ranges; return ranges;
...@@ -96,18 +101,14 @@ pub fn axis_range(datasets: &Vec<Vec<Smoothed>>, outlier: bool) -> Range { ...@@ -96,18 +101,14 @@ pub fn axis_range(datasets: &Vec<Vec<Smoothed>>, outlier: bool) -> Range {
let mut ranges: Vec<Range> = vec![]; let mut ranges: Vec<Range> = vec![];
for data in datasets.iter() { for data in datasets.iter() {
let n: usize = data.len(); if data.len() == 0 {
if n == 0 {
continue; continue;
} }
let values: Vec<f64> = data.iter().map(|x| x.2).collect(); let (sorted, values) = sort_values(data);
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
if !outlier { if !outlier {
ranges.push(Range::new(sorted[0], sorted[n - 1])); ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
} else { } else {
ranges.push(Range::new( ranges.push(Range::new(
quantile(&sorted, 0.05_f64), quantile(&sorted, 0.05_f64),
......
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册