Unverified commit e33f6848 authored by Peter Pan, committed by GitHub

nan values won't break charts down now (#984)

* chore: update dependencies

* fix(scalar): nan values break charts down
Parent 50fab2de
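The fix applies one idea throughout the stack: before a scalar series reaches the chart, any value that is not a finite number (NaN, Inf, -Inf, or a missing reading) is replaced with null, and every consumer of the data (smoothing, ranges, tooltips, nearest-point lookup) learns to skip those nulls. ECharts renders a null data point as a gap in the line, so one bad value no longer breaks the whole chart. A minimal TypeScript sketch of the sanitization step, using a hypothetical `sanitize` helper (the real code inlines this mapping in `ScalarChart`):

```ts
// Raw scalar rows are [wallTime, step, value] triples; the value may arrive as
// NaN, ±Infinity, or the strings 'NaN' / 'Inf' / '-Inf' from the backend.
type RawValue = number | 'NaN' | 'Inf' | '-Inf';
type RawRow = [number, number, RawValue];
type SafeRow = [number, number, number | null];

// Hypothetical helper: keep finite numbers, map everything else to null so the
// chart draws a gap instead of a broken series.
const sanitize = (rows: RawRow[]): SafeRow[] =>
    rows.map(([wallTime, step, value]) => [
        wallTime,
        step,
        typeof value === 'number' && Number.isFinite(value) ? value : null
    ]);
```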
@@ -31,11 +31,13 @@
"build": "./scripts/build.sh",
"build:core": "yarn workspace @visualdl/core build",
"build:demo": "yarn workspace @visualdl/demo build",
"build:wasm": "yarn workspace @visualdl/wasm build",
"clean": "rimraf output packages/*/dist packages/wasm/target",
"dev": "yarn dev:core",
"dev:core": "yarn workspace @visualdl/core dev",
"dev:demo": "yarn workspace @visualdl/server dev:demo",
"dev:server": "yarn workspace @visualdl/server dev",
"dev:wasm": "yarn workspace @visualdl/wasm dev",
"lint": "eslint --ext .tsx,.jsx.ts,.js,.mjs .",
"format": "prettier --write \"**/*.{ts,tsx,js,jsx}\"",
"test": "yarn workspaces run test",
@@ -44,8 +46,8 @@
"version": "yarn format && git add -A"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "4.26.0",
"@typescript-eslint/parser": "4.26.0",
"@typescript-eslint/eslint-plugin": "4.26.1",
"@typescript-eslint/parser": "4.26.1",
"eslint": "7.28.0",
"eslint-config-prettier": "8.3.0",
"eslint-plugin-license-header": "0.2.0",
......
@@ -95,30 +95,30 @@
"@types/chai": "4.2.18",
"@types/d3": "6.7.0",
"@types/d3-format": "2.0.0",
"@types/echarts": "4.9.7",
"@types/echarts": "4.9.8",
"@types/file-saver": "2.0.2",
"@types/lodash": "4.14.170",
"@types/mime-types": "2.1.0",
"@types/nprogress": "0.2.0",
"@types/numeric": "1.2.1",
"@types/react": "17.0.9",
"@types/react-dom": "17.0.6",
"@types/react": "17.0.10",
"@types/react-dom": "17.0.7",
"@types/react-helmet": "6.1.1",
"@types/react-rangeslider": "2.2.3",
"@types/react-redux": "7.1.16",
"@types/react-router-dom": "5.1.7",
"@types/react-table": "7.7.1",
"@types/snowpack-env": "2.3.3",
"@types/styled-components": "5.1.9",
"@types/styled-components": "5.1.10",
"@types/three": "0.129.1",
"@visualdl/mock": "2.2.0-1",
"@web/test-runner": "0.13.5",
"@web/test-runner": "0.13.6",
"chai": "4.3.4",
"chalk": "4.1.1",
"dotenv": "10.0.0",
"enhanced-resolve": "5.8.2",
"html-minifier": "4.0.0",
"snowpack": "3.5.5",
"snowpack": "3.5.6",
"snowpack-plugin-copy": "1.0.1",
"typescript": "4.3.2"
},
......
@@ -85,7 +85,14 @@ const ScalarChart: FunctionComponent<ScalarChartProps> = ({
const xAxisType = useMemo(() => (xAxis === XAxis.WallTime ? XAxisType.time : XAxisType.value), [xAxis]);
const transformParams = useMemo(() => [datasets?.map(data => data ?? []) ?? [], smoothing], [datasets, smoothing]);
const transformParams = useMemo(
() => [
datasets?.map(data => data?.map(row => [row[0], row[1], Number.isFinite(row[2]) ? row[2] : null]) ?? []) ??
[],
smoothing
],
[datasets, smoothing]
);
const {data: smoothedDatasetsOrUndefined} = useWebAssembly<Dataset[]>('scalar_transform', transformParams);
const smoothedDatasets = useMemo<NonNullable<typeof smoothedDatasetsOrUndefined>>(
() => smoothedDatasetsOrUndefined ?? [],
......
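The sanitized rows above are what `useWebAssembly('scalar_transform', transformParams)` hands to the WASM module; on the Rust side the value column is an `Option<f64>` (see the `Dataset` struct change below), so a `null` in the payload arrives as `None`. An illustrative payload, with made-up numbers:

```ts
// Illustrative only: two runs, the second of which logged a NaN at step 20.
// After the mapping in ScalarChart that NaN is already null.
const transformParams = [
    [
        // run A: [wallTime (s), step, value | null]
        [[1623300000.1, 10, 0.52], [1623300060.4, 20, 0.47]],
        // run B: the bad reading stays null through smoothing and plotting
        [[1623300000.2, 10, 0.61], [1623300061.0, 20, null]]
    ],
    0.6 // smoothing factor
];
```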
@@ -158,7 +158,7 @@ export const tooltip = (data: TooltipData[], stepLength: number, i18n: typeof I1
],
data: data.map(({min, max, item}) => [
valueFormatter(item[3] ?? Number.NaN),
valueFormatter(item[2] ?? Number.NaN),
valueFormatter(Number.isFinite(item[2]) ? (item[2] as number) : Number.NaN),
item[1],
valueFormatter(min ?? Number.NaN),
valueFormatter(max ?? Number.NaN),
......
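The tooltip applies the same guard at display time: only finite numbers go through the value formatter, anything else falls back to `Number.NaN`, which formats as a plain "NaN" placeholder. A hedged sketch, assuming a d3-format style formatter (the actual `valueFormatter` in this module may differ):

```ts
import {format} from 'd3-format';

// Assumed formatter; the real valueFormatter may use a different specifier.
const fmt = format('.5');

// Only finite numbers are formatted; null / 'NaN' / Infinity render as 'NaN'.
const display = (value: unknown): string =>
    typeof value === 'number' && Number.isFinite(value) ? fmt(value) : 'NaN';

display(0.4217513); // '0.42175'
display(null);      // 'NaN'
```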
@@ -16,7 +16,7 @@
// cSpell:words quantile accum debias exponentiated
import type {Dataset, ScalarDataset} from './types';
import type {Dataset, ScalarDataset, Value} from './types';
import BigNumber from 'bignumber.js';
import type {Run} from '~/types';
@@ -34,13 +34,16 @@ export const transform = ({datasets, smoothing}: {datasets: ScalarDataset[]; smo
let startValue = 0;
const bigSmoothing = new BigNumber(smoothing);
data.forEach((d, i) => {
const nextVal = new BigNumber(d[2]);
const millisecond = (d[0] = Math.floor(d[0]));
if (i === 0) {
startValue = millisecond;
}
// relative time in millisecond.
d[4] = Math.floor(millisecond - startValue);
if (!Number.isFinite(d[2])) {
d[3] = null;
} else {
const nextVal = new BigNumber(d[2] as number);
if (!nextVal.isFinite()) {
d[3] = nextVal.toNumber();
} else {
@@ -55,13 +58,14 @@ export const transform = ({datasets, smoothing}: {datasets: ScalarDataset[]; smo
// d[3] = last / debiasWeight;
d[3] = last.dividedBy(debiasWeight).toNumber();
}
}
});
return data;
});
export const singlePointRange = (value: number) => ({
min: value ? Math.min(value * 2, 0) : -0.5,
max: value ? Math.max(value * 2, 0) : 0.5
export const singlePointRange = (value: Value) => ({
min: Number.isFinite(value) ? Math.min((value as number) * 2, 0) : -0.5,
max: Number.isFinite(value) ? Math.max((value as number) * 2, 0) : 0.5
});
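The smoothing above is an exponentially weighted moving average with debiasing: each finite value updates `last = last * smoothing + (1 - smoothing) * value`, and the stored result divides by `1 - smoothing^numAccum` so early points are not biased toward zero. After this change a non-finite input simply yields a null smoothed value and leaves the accumulator untouched. A simplified sketch with plain numbers (the real code uses BigNumber for precision):

```ts
// Simplified debiased EMA over the value column; mirrors the transform above
// but without BigNumber and without the wall-time bookkeeping.
function smooth(values: (number | null)[], smoothing: number): (number | null)[] {
    let last = 0;
    let numAccum = 0;
    return values.map(value => {
        if (value == null || !Number.isFinite(value)) {
            return null; // skip the point, keep the accumulator as-is
        }
        last = last * smoothing + (1 - smoothing) * value;
        numAccum += 1;
        // Debias: without this the first few points are pulled toward 0.
        const debiasWeight = smoothing === 1 ? 1 : 1 - Math.pow(smoothing, numAccum);
        return last / debiasWeight;
    });
}

smooth([1, null, 2, 3], 0.6); // → [1, null, 1.625, ≈2.327]
```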
export const range = ({datasets}: {datasets: Dataset[]}) => {
@@ -72,7 +76,7 @@ export const range = ({datasets}: {datasets: Dataset[]}) => {
max: Number.NaN
};
}
const values = dataset.map(v => v[2]);
const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
return {
min: Math.min(...values) ?? Number.NaN,
max: Math.max(...values) ?? Number.NaN
@@ -86,7 +90,7 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo
if (dataset.length === 0) {
return void 0;
}
const values = dataset.map(v => v[2]);
const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
if (!outlier) {
// Get the origin data range.
return {
@@ -95,7 +99,10 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo
};
} else {
// Get the quantile range.
const sorted = dataset.map(v => v[2]).sort();
const sorted = dataset
.map(v => v[2])
.filter(Number.isFinite)
.sort() as number[];
return {
min: quantile(sorted, 0.05),
max: quantile(values, 0.95)
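For the outlier-clipping branch, values are filtered to finite numbers, sorted, and clipped to the 5th–95th percentile. The `quantile` helper itself is not shown in this hunk; judging from the Rust version further down, it interpolates linearly between neighbouring sorted values. A sketch of that interpolation, stated as an assumption about the TypeScript helper:

```ts
// Assumed to mirror quantile() in the WASM crate: linear interpolation over an
// already-sorted array, with p in [0, 1].
function quantile(sorted: number[], p: number): number {
    const n = sorted.length;
    if (n === 0) {
        return Number.NaN;
    }
    const i = (n - 1) * p;
    const i0 = Math.floor(i);
    const value0 = sorted[i0];
    const value1 = sorted[Math.min(i0 + 1, n - 1)];
    return value0 + (value1 - value0) * (i - i0);
}

quantile([1, 2, 3, 4, 5], 0.95); // 4.8
```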
@@ -122,10 +129,13 @@ export const nearestPoint = (data: Dataset[], runs: Run[], idx: number, value: n
let d = Number.POSITIVE_INFINITY;
let dv = value;
for (let i = 0; i < series.length; i++) {
const dd = Math.abs(series[i][idx] - value);
const v = series[i][idx];
if (Number.isFinite(v)) {
const dd = Math.abs((v as number) - value);
if (d > dd) {
d = dd;
dv = series[i][idx];
dv = v as number;
}
}
}
result.push(...series.filter(s => s[idx] === dv).map(item => ({run, item})));
......
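`nearestPoint` now skips rows whose compared coordinate is not a finite number, so a null value can never win the distance comparison. A condensed sketch of that search:

```ts
// Find the entry in column `idx` closest to `target`, ignoring null/NaN rows
// (condensed from nearestPoint above).
function nearestFinite(rows: (number | null)[][], idx: number, target: number): number {
    let best = Number.POSITIVE_INFINITY;
    let nearest = target;
    for (const row of rows) {
        const v = row[idx];
        if (v != null && Number.isFinite(v)) {
            const d = Math.abs(v - target);
            if (d < best) {
                best = d;
                nearest = v;
            }
        }
    }
    return nearest;
}
```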
@@ -31,7 +31,8 @@ export const sortingMethodMap: Record<SM, (points: TooltipData[], data: number[]
[SM.Descending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]).reverse(),
[SM.Ascending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]),
// Compare other points width the trigger point, calculate the nearest sort.
[SM.Nearest]: (points: TooltipData[], data: number[]) => sortBy(points, point => point.item[3] - data[2])
[SM.Nearest]: (points: TooltipData[], data: number[]) =>
sortBy(points, point => (point.item[3] ?? Number.NaN) - data[2])
} as const;
export type {Dataset, ScalarDataset, Range, TooltipData} from './types';
......
@@ -18,10 +18,11 @@ import {Run, TimeMode} from '~/types';
export type {Range} from '~/types';
type Value = number;
type InvalidValue = 'NaN' | 'Inf' | '-Inf';
export type Value = number | null | InvalidValue;
type WallTime = number;
type Step = number;
type Smoothed = number;
type Smoothed = number | null;
type Relative = number;
export type Dataset = [WallTime, Step, Value, Smoothed, Relative][];
......
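With `Value` widened to `number | null | 'NaN' | 'Inf' | '-Inf'`, every consumer needs the same "is this actually a finite number" check; the diff inlines `Number.isFinite(...)` plus an `as number` cast at each call site. A hypothetical type guard (not part of this diff) would express the same check while narrowing the type for the compiler:

```ts
type InvalidValue = 'NaN' | 'Inf' | '-Inf';
type Value = number | null | InvalidValue;

// Hypothetical helper: true only for finite numbers, narrowing Value to number
// so no `as number` cast is needed afterwards.
function isFiniteValue(value: Value): value is number {
    return typeof value === 'number' && Number.isFinite(value);
}

declare const v: Value;
if (isFiniteValue(v)) {
    const doubled = v * 2; // v is a plain number here
}
```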
@@ -35,7 +35,7 @@
"devDependencies": {
"@types/express": "4.17.12",
"@types/mkdirp": "1.0.1",
"@types/node": "15.12.1",
"@types/node": "15.12.2",
"@types/node-fetch": "2.5.10",
"@types/rimraf": "3.0.0",
"cpy-cli": "3.1.1",
......
@@ -52,7 +52,7 @@
"sass-loader": "12.0.0",
"terser": "5.7.0",
"webpack": "5.38.1",
"webpack-cli": "4.7.0"
"webpack-cli": "4.7.2"
},
"engines": {
"node": ">=12",
......
@@ -41,12 +41,12 @@
"enhanced-resolve": "5.8.2",
"express": "4.17.1",
"http-proxy-middleware": "2.0.0",
"pm2": "4.5.6"
"pm2": "5.0.4"
},
"devDependencies": {
"@types/enhanced-resolve": "3.0.6",
"@types/express": "4.17.12",
"@types/node": "15.12.1",
"@types/node": "15.12.2",
"@visualdl/mock": "2.2.0-1",
"cross-env": "7.0.3",
"nodemon": "2.0.7",
......
@@ -33,6 +33,7 @@
"types": "dist/index.d.ts",
"scripts": {
"build": "wasm-pack build --release --out-dir dist --out-name index --target web .",
"dev": "wasm-pack build --dev --out-dir dist --out-name index --target web .",
"test": "echo \"Error: no test specified\" && exit 0"
},
"devDependencies": {
......
#[derive(Serialize, Deserialize)]
pub struct Dataset(f64, i64, f64);
pub struct Dataset(f64, i64, Option<f64>);
#[derive(Serialize, Deserialize)]
pub struct Smoothed(i64, i64, f64, f64, i64);
pub struct Smoothed(i64, i64, Option<f64>, Option<f64>, i64);
#[derive(Serialize, Deserialize)]
pub struct Range {
@@ -33,6 +33,13 @@ fn quantile(values: &Vec<f64>, p: f64) -> f64 {
return value0 + (value1 - value0) * (i - (i0 as f64));
}
fn sort_values(data: &Vec<Smoothed>) -> (Vec<f64>, Vec<f64>) {
let values: Vec<f64> = data.iter().filter_map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
(sorted, values)
}
pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smoothed>> {
let mut result: Vec<Vec<Smoothed>> = vec![];
for dataset in datasets.iter() {
@@ -44,8 +51,7 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
let mut num_accum: i32 = 0;
let mut start_value: i64 = 0;
for (i, d) in dataset.iter().enumerate() {
let mut r: Smoothed = Smoothed(0, d.1, d.2, 0.0, 0);
let next_val: f64 = d.2;
let mut r: Smoothed = Smoothed(0, d.1, d.2, Some(0.0), 0);
// second to millisecond.
let millisecond: i64 = d.0.floor() as i64;
r.0 = millisecond;
@@ -54,8 +60,9 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
}
// Relative time in millisecond.
r.4 = millisecond - start_value;
if next_val.is_infinite() {
r.3 = next_val;
if let Some(next_val) = d.2 {
if next_val.is_infinite() || next_val.is_nan() {
r.3 = Some(next_val);
} else {
last = last * smoothing + (1.0 - smoothing) * next_val;
num_accum += 1;
@@ -63,7 +70,10 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
if smoothing != 1.0 {
debias_weight = (1.0_f64 - smoothing.powi(num_accum)).into();
}
r.3 = last / debias_weight;
r.3 = Some(last / debias_weight);
}
} else {
r.3 = None;
}
row.push(r);
}
@@ -76,17 +86,12 @@ pub fn range(datasets: &Vec<Vec<Smoothed>>) -> Vec<Range> {
let mut ranges: Vec<Range> = vec![];
for data in datasets.iter() {
let n: usize = data.len();
if n == 0 {
if data.len() == 0 {
ranges.push(Range::new(f64::NAN, f64::NAN));
}
let values: Vec<f64> = data.iter().map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
ranges.push(Range::new(sorted[0], sorted[n - 1]));
let (sorted, _) = sort_values(data);
ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
}
return ranges;
@@ -96,18 +101,14 @@ pub fn axis_range(datasets: &Vec<Vec<Smoothed>>, outlier: bool) -> Range {
let mut ranges: Vec<Range> = vec![];
for data in datasets.iter() {
let n: usize = data.len();
if n == 0 {
if data.len() == 0 {
continue;
}
let values: Vec<f64> = data.iter().map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
let (sorted, values) = sort_values(data);
if !outlier {
ranges.push(Range::new(sorted[0], sorted[n - 1]));
ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
} else {
ranges.push(Range::new(
quantile(&sorted, 0.05_f64),
......
This diff is collapsed.