未验证 提交 7a2bde2a 编写于 作者: K Kirill Lakhov 提交者: GitHub

Add gamma correction filter (#6771)

<!-- Raise an issue to propose your change
(https://github.com/opencv/cvat/issues).
It helps to avoid duplication of efforts from multiple independent
contributors.
Discuss your ideas with maintainers to be sure that changes will be
approved and merged.
Read the [Contribution
guide](https://opencv.github.io/cvat/docs/contributing/). -->

<!-- Provide a general summary of your changes in the Title above -->

### Motivation and context
<!-- Why is this change required? What problem does it solve? If it
fixes an open
issue, please link to the issue here. Describe your changes in detail,
add
screenshots. -->
Adds gamma correction filter

![debounced](https://github.com/opencv/cvat/assets/50956430/c1748100-355c-4dd7-a0fc-b994dc42e8de)

### How has this been tested?
<!-- Please describe in detail how you tested your changes.
Include details of your testing environment, and the tests you ran to
see how your change affects other areas of the code, etc. -->

### Checklist
<!-- Go over all the following points, and put an `x` in all the boxes
that apply.
If an item isn't applicable for some reason, then ~~explicitly
strikethrough~~ the whole
line. If you don't do that, GitHub will show incorrect progress for the
pull request.
If you're unsure about any of these, don't hesitate to ask. We're here
to help! -->
- [x] I submit my changes into the `develop` branch
- [x] I have added a description of my changes into the
[CHANGELOG](https://github.com/opencv/cvat/blob/develop/CHANGELOG.md)
file
- ~~[ ] I have updated the documentation accordingly~~
- ~~[ ] I have added tests to cover my changes~~
- [x] I have linked related issues (see [GitHub docs](

https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword))
- [x] I have increased versions of npm packages if it is necessary

([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning),

[cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning),

[cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning)
and

[cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning))

### License

- [x] I submit _my code changes_ under the same [MIT License](
https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the
project.
  Feel free to contact the maintainers if that's a concern.

---------
Co-authored-by: Boris Sekachev <boris.sekachev@yandex.ru>
上级 84c0816e
......@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## \[Unreleased\]
### Added
- Ability to hide/show an object in review mode (<https://github.com/opencv/cvat/pull/6808>)
- Gamma correction filter (<https://github.com/opencv/cvat/pull/6771>)
### Changed
- \[Helm\] Database migrations now run in a separate job instead of the server pod,
......
// Copyright (C) 2019-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
const { join } = require('path');
module.exports = {
ignorePatterns: [
'.eslintrc.js',
......@@ -13,4 +16,12 @@ module.exports = {
project: './tsconfig.json',
tsconfigRootDir: __dirname,
},
rules: {
'import/no-extraneous-dependencies': [
'error',
{
packageDir: [__dirname, join(__dirname, '../')]
},
],
}
};
......@@ -16,8 +16,6 @@
],
"dependencies": {
"@types/polylabel": "^1.0.5",
"@types/fabric": "^4.5.7",
"fabric": "^5.2.1",
"polylabel": "^1.1.0",
"svg.draggable.js": "2.2.2",
"svg.draw.js": "^2.0.4",
......
// Copyright (C) 2020-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
const globalConfig = require('../.eslintrc.js');
const { join } = require('path');
module.exports = {
parserOptions: {
......@@ -29,5 +31,11 @@ module.exports = {
'react/jsx-indent-props': ['warn', 4],
'react/jsx-props-no-spreading': 0,
'jsx-quotes': ['error', 'prefer-single'],
'import/no-extraneous-dependencies': [
'error',
{
packageDir: [__dirname, join(__dirname, '../')]
},
],
},
};
{
"name": "cvat-ui",
"version": "1.55.8",
"version": "1.56.0",
"description": "CVAT single-page application",
"main": "src/index.tsx",
"scripts": {
......
// Copyright (C) 2020-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
......@@ -6,6 +7,7 @@ import { AnyAction } from 'redux';
import {
GridColor, ColorBy, SettingsState, ToolsBlockerState,
} from 'reducers';
import { ImageFilter, ImageFilterAlias } from 'utils/image-processing';
export enum SettingsActionTypes {
SWITCH_ROTATE_ALL = 'SWITCH_ROTATE_ALL',
......@@ -45,6 +47,9 @@ export enum SettingsActionTypes {
SWITCH_TOOLS_BLOCKER_STATE = 'SWITCH_TOOLS_BLOCKER_STATE',
SWITCH_SHOWING_DELETED_FRAMES = 'SWITCH_SHOWING_DELETED_FRAMES',
SWITCH_SHOWING_TAGS_ON_FRAME = 'SWITCH_SHOWING_TAGS_ON_FRAME',
ENABLE_IMAGE_FILTER = 'ENABLE_IMAGE_FILTER',
DISABLE_IMAGE_FILTER = 'DISABLE_IMAGE_FILTER',
RESET_IMAGE_FILTERS = 'RESET_IMAGE_FILTERS',
}
export function changeShapesOpacity(opacity: number): AnyAction {
......@@ -380,3 +385,29 @@ export function switchShowingTagsOnFrame(showTagsOnFrame: boolean): AnyAction {
},
};
}
/**
 * Builds an action that registers an image filter, or re-configures an
 * already registered one when `options` is provided.
 *
 * @param filter - the filter descriptor (modifier instance + alias)
 * @param options - optional configuration object forwarded to the filter's
 *                  modifier; `null` means "register as-is"
 */
export function enableImageFilter(filter: ImageFilter, options: object | null = null): AnyAction {
    const payload = { filter, options };
    return { type: SettingsActionTypes.ENABLE_IMAGE_FILTER, payload };
}
/**
 * Builds an action that removes the image filter registered under the
 * given alias (a no-op in the reducer if no such filter is active).
 */
export function disableImageFilter(filterAlias: ImageFilterAlias): AnyAction {
    const payload = { filterAlias };
    return { type: SettingsActionTypes.DISABLE_IMAGE_FILTER, payload };
}
/** Builds an action that clears every active image filter. */
export function resetImageFilters(): AnyAction {
    return { type: SettingsActionTypes.RESET_IMAGE_FILTERS, payload: {} };
}
......@@ -9,6 +9,8 @@ import Slider from 'antd/lib/slider';
import Spin from 'antd/lib/spin';
import Dropdown from 'antd/lib/dropdown';
import { PlusCircleOutlined, UpOutlined } from '@ant-design/icons';
import notification from 'antd/lib/notification';
import debounce from 'lodash/debounce';
import GlobalHotKeys, { KeyMap } from 'utils/mousetrap-react';
import {
......@@ -57,6 +59,7 @@ import {
import { reviewActions } from 'actions/review-actions';
import { filterAnnotations } from 'utils/filter-annotations';
import { ImageFilter } from 'utils/image-processing';
import ImageSetupsContent from './image-setups-content';
import BrushTools from './brush-tools';
......@@ -113,6 +116,7 @@ interface StateToProps {
showGroundTruth: boolean;
highlightedConflict: QualityConflict | null;
groundTruthJobFramesMeta: FramesMetaData | null;
imageFilters: ImageFilter[];
}
interface DispatchToProps {
......@@ -194,6 +198,7 @@ function mapStateToProps(state: CombinedState): StateToProps {
shapes: {
opacity, colorBy, selectedOpacity, outlined, outlineColor, showBitmap, showProjections, showGroundTruth,
},
imageFilters,
},
shortcuts: { keyMap },
review: { conflicts },
......@@ -253,6 +258,7 @@ function mapStateToProps(state: CombinedState): StateToProps {
showGroundTruth,
highlightedConflict,
groundTruthJobFramesMeta,
imageFilters,
};
}
......@@ -358,6 +364,8 @@ function mapDispatchToProps(dispatch: any): DispatchToProps {
type Props = StateToProps & DispatchToProps;
class CanvasWrapperComponent extends React.PureComponent<Props> {
private debouncedUpdate = debounce(this.updateCanvas.bind(this), 250, { leading: true });
public componentDidMount(): void {
const {
automaticBordering,
......@@ -445,6 +453,7 @@ class CanvasWrapperComponent extends React.PureComponent<Props> {
statesSources,
showGroundTruth,
highlightedConflict,
imageFilters,
} = this.props;
const { canvasInstance } = this.props as { canvasInstance: Canvas };
......@@ -530,6 +539,10 @@ class CanvasWrapperComponent extends React.PureComponent<Props> {
});
}
if (prevProps.imageFilters !== imageFilters) {
canvasInstance.configure({ forceFrameUpdate: true });
}
if (
prevProps.annotations !== annotations ||
prevProps.statesSources !== statesSources ||
......@@ -537,6 +550,10 @@ class CanvasWrapperComponent extends React.PureComponent<Props> {
prevProps.curZLayer !== curZLayer
) {
this.updateCanvas();
} else if (prevProps.imageFilters !== imageFilters) {
// In case of frequent image filters changes, we apply debounced canvas update
// that makes UI smoother
this.debouncedUpdate();
}
if (prevProps.showBitmap !== showBitmap) {
......@@ -897,9 +914,11 @@ class CanvasWrapperComponent extends React.PureComponent<Props> {
private updateCanvas(): void {
const {
curZLayer, annotations, frameData, canvasInstance, statesSources,
workspace, groundTruthJobFramesMeta, frame,
curZLayer, annotations, frameData, statesSources,
workspace, groundTruthJobFramesMeta, frame, imageFilters,
} = this.props;
const { canvasInstance } = this.props as { canvasInstance: Canvas };
if (frameData !== null && canvasInstance) {
const filteredAnnotations = filterAnnotations(annotations, {
statesSources,
......@@ -908,12 +927,54 @@ class CanvasWrapperComponent extends React.PureComponent<Props> {
workspace,
exclude: [ObjectType.TAG],
});
const proxy = new Proxy(frameData, {
get: (_frameData, prop, receiver) => {
if (prop === 'data') {
return async () => {
const originalImage = await _frameData.data();
const imageIsNotProcessed = imageFilters.some((imageFilter: ImageFilter) => (
imageFilter.modifier.currentProcessedImage !== frame
));
if (imageIsNotProcessed) {
try {
const { renderWidth, renderHeight, imageData: imageBitmap } = originalImage;
const offscreen = new OffscreenCanvas(renderWidth, renderHeight);
const ctx = offscreen.getContext('2d') as OffscreenCanvasRenderingContext2D;
ctx.drawImage(imageBitmap, 0, 0);
const imageData = ctx.getImageData(0, 0, renderWidth, renderHeight);
const newImageData = imageFilters
.reduce((oldImageData, activeImageModifier) => activeImageModifier
.modifier.processImage(oldImageData, frame), imageData);
const newImageBitmap = await createImageBitmap(newImageData);
return {
renderWidth,
renderHeight,
imageData: newImageBitmap,
};
} catch (error: any) {
notification.error({
description: error.toString(),
message: 'Image processing error occurred',
className: 'cvat-notification-notice-image-processing-error',
});
}
}
return originalImage;
};
}
return Reflect.get(_frameData, prop, receiver);
},
});
canvasInstance.setup(
frameData,
proxy,
frameData.deleted ? [] : filteredAnnotations,
curZLayer,
);
canvasInstance.configure({ forceFrameUpdate: false });
}
}
......
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
import React, { useEffect, useState, useCallback } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Row, Col } from 'antd/lib/grid';
import { CombinedState } from 'reducers';
import Text from 'antd/lib/typography/Text';
import Slider from 'antd/lib/slider';
import {
enableImageFilter,
disableImageFilter,
} from 'actions/settings-actions';
import GammaCorrection from 'utils/fabric-wrapper/gamma-correciton';
import { ImageFilterAlias, hasFilter } from 'utils/image-processing';
import './image-setups.scss';
/**
 * Slider control for the gamma-correction image filter.
 *
 * A value of 1 is treated as "identity": moving the slider back to 1
 * removes the filter instead of applying a no-op correction. The local
 * slider state is reset to 1 whenever all image filters are cleared
 * elsewhere (e.g. via "Reset color settings").
 */
export default function GammaFilter(): JSX.Element {
    const dispatch = useDispatch();
    const [gamma, setGamma] = useState<number>(1);
    const filters = useSelector((state: CombinedState) => state.settings.imageFilters);
    const gammaFilter = hasFilter(filters, ImageFilterAlias.GAMMA_CORRECTION);

    const onChangeGamma = useCallback((value: number): void => {
        setGamma(value);

        // Gamma of exactly 1 does not change the image — drop the filter if present.
        if (value === 1) {
            if (gammaFilter) {
                dispatch(disableImageFilter(ImageFilterAlias.GAMMA_CORRECTION));
            }
            return;
        }

        // The fabric Gamma filter expects per-channel exponents [R, G, B];
        // the UI exposes a single value applied uniformly to all channels.
        const gammaRGB = [value, value, value];
        if (gammaFilter) {
            // Filter already active — just push the new configuration.
            dispatch(enableImageFilter(gammaFilter, { gamma: gammaRGB }));
        } else {
            dispatch(enableImageFilter({
                modifier: new GammaCorrection({ gamma: gammaRGB }),
                alias: ImageFilterAlias.GAMMA_CORRECTION,
            }));
        }
    }, [gammaFilter]);

    // Keep the slider in sync when filters are reset externally.
    useEffect(() => {
        if (filters.length === 0) {
            setGamma(1);
        }
    }, [filters]);

    return (
        <div className='cvat-image-setups-filters'>
            <Row justify='space-around'>
                <Col span={24}>
                    <Row className='cvat-image-setups-gamma'>
                        <Col span={6}>
                            <Text className='cvat-text-color'> Gamma </Text>
                        </Col>
                        <Col span={12}>
                            <Slider
                                min={0.2}
                                max={2.6}
                                value={gamma}
                                step={0.01}
                                onChange={onChangeGamma}
                            />
                        </Col>
                    </Row>
                </Col>
            </Row>
        </div>
    );
}
// Copyright (C) 2021-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
......@@ -20,9 +21,11 @@ import {
changeContrastLevel,
changeSaturationLevel,
changeGridSize,
resetImageFilters,
} from 'actions/settings-actions';
import { clamp } from 'utils/math';
import { GridColor, CombinedState, PlayerSettingsState } from 'reducers';
import GammaFilter from './gamma-filter';
const minGridSize = 5;
const maxGridSize = 1000;
......@@ -168,21 +171,23 @@ export default function ImageSetupsContent(): JSX.Element {
/>
</Col>
</Row>
<Row className='cvat-image-setups-reset-color-settings' justify='space-around'>
<Col>
<Button
className='cvat-image-setups-reset-color-settings-button'
onClick={() => {
const defaultValue = 100;
dispatch(changeBrightnessLevel(defaultValue));
dispatch(changeContrastLevel(defaultValue));
dispatch(changeSaturationLevel(defaultValue));
}}
>
Reset color settings
</Button>
</Col>
</Row>
</Col>
</Row>
<GammaFilter />
<Row className='cvat-image-setups-reset-color-settings' justify='space-around'>
<Col>
<Button
className='cvat-image-setups-reset-color-settings-button'
onClick={() => {
const defaultValue = 100;
dispatch(changeBrightnessLevel(defaultValue));
dispatch(changeContrastLevel(defaultValue));
dispatch(changeSaturationLevel(defaultValue));
dispatch(resetImageFilters());
}}
>
Reset color settings
</Button>
</Col>
</Row>
</div>
......
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
@import 'base';
// Container for the extra image-filter controls (e.g. the gamma slider)
// rendered inside the image setups popover.
.cvat-image-setups-filters {
    margin-bottom: $grid-unit-size * 3;
}
// Copyright (C) 2021-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
......@@ -36,8 +37,9 @@ import CVATTooltip from 'components/common/cvat-tooltip';
import ApproximationAccuracy, {
thresholdFromAccuracy,
} from 'components/annotation-page/standard-workspace/controls-side-bar/approximation-accuracy';
import { ImageProcessing, OpenCVTracker, TrackerModel } from 'utils/opencv-wrapper/opencv-interfaces';
import { switchToolsBlockerState } from 'actions/settings-actions';
import { OpenCVTracker, TrackerModel } from 'utils/opencv-wrapper/opencv-interfaces';
import { enableImageFilter as enableImageFilterAction, disableImageFilter as disableImageFilterAction, switchToolsBlockerState } from 'actions/settings-actions';
import { ImageFilter, ImageFilterAlias, hasFilter } from 'utils/image-processing';
import withVisibilityHandling from './handle-popover-visibility';
interface Props {
......@@ -52,6 +54,7 @@ interface Props {
frameData: any;
toolsBlockerState: ToolsBlockerState;
activeControl: ActiveControl;
filters: ImageFilter[];
}
interface DispatchToProps {
......@@ -62,6 +65,8 @@ interface DispatchToProps {
changeFrame(toFrame: number, fillBuffer?: boolean, frameStep?: number, forceUpdate?: boolean):void;
onSwitchToolsBlockerState(toolsBlockerState: ToolsBlockerState):void;
switchNavigationBlocked(navigationBlocked: boolean): void;
enableImageFilter(filter: ImageFilter): void;
disableImageFilter(filterAlias: string): void;
}
interface TrackedShape {
......@@ -76,16 +81,10 @@ interface State {
initializationProgress: number;
activeLabelID: number;
approxPolyAccuracy: number;
activeImageModifiers: ImageModifier[];
mode: 'interaction' | 'tracking';
trackedShapes: TrackedShape[];
activeTracker: OpenCVTracker | null;
trackers: OpenCVTracker[]
}
interface ImageModifier {
modifier: ImageProcessing,
alias: string
trackers: OpenCVTracker[];
}
const core = getCore();
......@@ -106,6 +105,7 @@ function mapStateToProps(state: CombinedState): Props {
},
settings: {
workspace: { defaultApproxPolyAccuracy, toolsBlockerState },
imageFilters: filters,
},
} = state;
......@@ -121,6 +121,7 @@ function mapStateToProps(state: CombinedState): Props {
frame,
frameData,
toolsBlockerState,
filters,
};
}
......@@ -132,19 +133,19 @@ const mapDispatchToProps = {
changeFrame: changeFrameAsync,
onSwitchToolsBlockerState: switchToolsBlockerState,
switchNavigationBlocked: switchNavigationBlockedAction,
enableImageFilter: enableImageFilterAction,
disableImageFilter: disableImageFilterAction,
};
class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps, State> {
private activeTool: IntelligentScissors | null;
private latestPoints: number[];
private canvasForceUpdateWasEnabled: boolean;
public constructor(props: Props & DispatchToProps) {
super(props);
const { labels, defaultApproxPolyAccuracy } = props;
this.activeTool = null;
this.latestPoints = [];
this.canvasForceUpdateWasEnabled = false;
this.state = {
libraryInitialized: openCVWrapper.isInitialized,
......@@ -152,7 +153,6 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
initializationProgress: -1,
approxPolyAccuracy: defaultApproxPolyAccuracy,
activeLabelID: labels.length ? labels[0].id : null,
activeImageModifiers: [],
mode: 'interaction',
trackedShapes: [],
trackers: openCVWrapper.isInitialized ? Object.values(openCVWrapper.tracking) : [],
......@@ -163,7 +163,6 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
public componentDidMount(): void {
const { canvasInstance } = this.props;
canvasInstance.html().addEventListener('canvas.interacted', this.interactionListener);
canvasInstance.html().addEventListener('canvas.setup', this.runImageModifier);
}
public componentDidUpdate(prevProps: Props, prevState: State): void {
......@@ -209,7 +208,6 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
public componentWillUnmount(): void {
const { canvasInstance } = this.props;
canvasInstance.html().removeEventListener('canvas.interacted', this.interactionListener);
canvasInstance.html().removeEventListener('canvas.setup', this.runImageModifier);
openCVWrapper.removeProgressCallback();
}
......@@ -383,46 +381,6 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
}
};
private runImageModifier = async ():Promise<void> => {
const { activeImageModifiers } = this.state;
const {
frameData, states, curZOrder, canvasInstance, frame,
} = this.props;
try {
if (activeImageModifiers.length !== 0 && activeImageModifiers[0].modifier.currentProcessedImage !== frame) {
this.enableCanvasForceUpdate();
const imageData = this.getCanvasImageData();
const newImageData = activeImageModifiers
.reduce((oldImageData, activeImageModifier) => activeImageModifier
.modifier.processImage(oldImageData, frame), imageData);
const imageBitmap = await createImageBitmap(newImageData);
const proxy = new Proxy(frameData, {
get: (_frameData, prop, receiver) => {
if (prop === 'data') {
return async () => ({
renderWidth: imageData.width,
renderHeight: imageData.height,
imageData: imageBitmap,
});
}
return Reflect.get(_frameData, prop, receiver);
},
});
canvasInstance.setup(proxy, states, curZOrder);
}
} catch (error: any) {
notification.error({
description: error.toString(),
message: 'OpenCV.js processing error occurred',
className: 'cvat-notification-notice-opencv-processing-error',
});
} finally {
this.disableCanvasForceUpdate();
}
};
private applyTracking = (imageData: ImageData, shape: TrackedShape,
objectState: any): Promise<void> => new Promise((resolve, reject) => {
setTimeout(() => {
......@@ -566,45 +524,6 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
return points;
}
private imageModifier(alias: string): ImageProcessing | null {
const { activeImageModifiers } = this.state;
return activeImageModifiers.find((imageModifier) => imageModifier.alias === alias)?.modifier || null;
}
private disableImageModifier(alias: string):void {
const { activeImageModifiers } = this.state;
const index = activeImageModifiers.findIndex((imageModifier) => imageModifier.alias === alias);
if (index !== -1) {
activeImageModifiers.splice(index, 1);
this.setState({
activeImageModifiers: [...activeImageModifiers],
});
}
}
private enableImageModifier(modifier: ImageProcessing, alias: string): void {
this.setState((prev: State) => ({
...prev,
activeImageModifiers: [...prev.activeImageModifiers, { modifier, alias }],
}), () => {
this.runImageModifier();
});
}
private enableCanvasForceUpdate():void {
const { canvasInstance } = this.props;
canvasInstance.configure({ forceFrameUpdate: true });
this.canvasForceUpdateWasEnabled = true;
}
private disableCanvasForceUpdate():void {
if (this.canvasForceUpdateWasEnabled) {
const { canvasInstance } = this.props;
canvasInstance.configure({ forceFrameUpdate: false });
this.canvasForceUpdateWasEnabled = false;
}
}
private async initializeOpenCV():Promise<void> {
try {
this.setState({
......@@ -675,27 +594,26 @@ class OpenCVControlComponent extends React.PureComponent<Props & DispatchToProps
}
private renderImageContent():JSX.Element {
const { enableImageFilter, disableImageFilter, filters } = this.props;
return (
<Row justify='start'>
<Col>
<CVATTooltip title='Histogram equalization' className='cvat-opencv-image-tool'>
<Button
className={
this.imageModifier('histogram') ?
hasFilter(filters, ImageFilterAlias.HISTOGRAM_EQUALIZATION) ?
'cvat-opencv-histogram-tool-button cvat-opencv-image-tool-active' : 'cvat-opencv-histogram-tool-button'
}
onClick={(e: React.MouseEvent<HTMLElement>) => {
const modifier = this.imageModifier('histogram');
if (!modifier) {
this.enableImageModifier(openCVWrapper.imgproc.hist(), 'histogram');
if (!hasFilter(filters, ImageFilterAlias.HISTOGRAM_EQUALIZATION)) {
enableImageFilter({
modifier: openCVWrapper.imgproc.hist(),
alias: ImageFilterAlias.HISTOGRAM_EQUALIZATION,
});
} else {
const button = e.target as HTMLElement;
button.blur();
this.disableImageModifier('histogram');
const { changeFrame } = this.props;
const { frame } = this.props;
this.enableCanvasForceUpdate();
changeFrame(frame, false, 1, true);
disableImageFilter(ImageFilterAlias.HISTOGRAM_EQUALIZATION);
}
}}
>
......
......@@ -12,6 +12,7 @@ import {
import { IntelligentScissors } from 'utils/opencv-wrapper/intelligent-scissors';
import { KeyMap } from 'utils/mousetrap-react';
import { OpenCVTracker } from 'utils/opencv-wrapper/opencv-interfaces';
import { ImageFilter } from 'utils/image-processing';
export type StringObject = {
[index: string]: string;
......@@ -826,6 +827,7 @@ export interface SettingsState {
shapes: ShapesSettingsState;
workspace: WorkspaceSettingsState;
player: PlayerSettingsState;
imageFilters: ImageFilter[];
showDialog: boolean;
}
......
......@@ -60,6 +60,7 @@ const defaultState: SettingsState = {
contrastLevel: 100,
saturationLevel: 100,
},
imageFilters: [],
showDialog: false,
};
......@@ -394,6 +395,49 @@ export default (state = defaultState, action: AnyAction): SettingsState => {
},
};
}
case SettingsActionTypes.ENABLE_IMAGE_FILTER: {
const { filter, options } = action.payload;
const { alias } = filter;
const filters = [...state.imageFilters];
const index = filters.findIndex((imageFilter) => imageFilter.alias === alias);
if (options && index !== -1) {
const enabledFilter = filters[index];
enabledFilter.modifier.currentProcessedImage = null;
enabledFilter.modifier.configure(options);
return {
...state,
imageFilters: filters,
};
}
return {
...state,
imageFilters: [
...state.imageFilters,
action.payload.filter,
],
};
}
case SettingsActionTypes.DISABLE_IMAGE_FILTER: {
const { filterAlias } = action.payload;
const filters = [...state.imageFilters];
const index = filters.findIndex((imageFilter) => imageFilter.alias === filterAlias);
if (index !== -1) {
filters.splice(index, 1);
}
filters.forEach((imageFilter) => {
imageFilter.modifier.currentProcessedImage = null;
});
return {
...state,
imageFilters: filters,
};
}
case SettingsActionTypes.RESET_IMAGE_FILTERS: {
return {
...state,
imageFilters: [],
};
}
case AnnotationActionTypes.UPLOAD_JOB_ANNOTATIONS_SUCCESS:
case AnnotationActionTypes.CREATE_ANNOTATIONS_SUCCESS:
case AnnotationActionTypes.CHANGE_FRAME_SUCCESS: {
......@@ -433,6 +477,7 @@ export default (state = defaultState, action: AnyAction): SettingsState => {
} :
{}),
},
imageFilters: [],
};
}
case AnnotationActionTypes.INTERACT_WITH_CANVAS: {
......
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
import { BaseImageFilter } from 'utils/image-processing';
/**
 * Base class for image filters backed by fabric.js 2D filters.
 *
 * Subclasses are expected to assign a concrete `fabric.IBaseFilter`
 * instance to `this.filter`; until then `processImage` passes the
 * source through untouched.
 */
export default class FabricFilter extends BaseImageFilter {
    public processImage(src: ImageData, frameNumber: number): ImageData {
        if (!this.filter) {
            // No concrete filter configured yet — return the input as-is.
            return src;
        }

        // Work on a copy so the caller's ImageData is never mutated.
        const copy = new ImageData(new Uint8ClampedArray(src.data), src.width, src.height);
        this.filter.applyTo2d({ imageData: copy });
        this.currentProcessedImage = frameNumber;
        return copy;
    }

    public configure(options: object): void {
        this.filter?.setOptions(options);
    }
}
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
import { fabric } from 'fabric';
import FabricFilter from './fabric-wrapper';
export interface GammaFilterOptions {
    gamma: number[];
}

/**
 * Gamma correction filter backed by the fabric.js `Gamma` 2D filter.
 *
 * `gamma` must be a three-element array of per-channel exponents
 * [R, G, B]; a value of 1 for every channel leaves the image unchanged.
 *
 * @throws Error if `gamma` is not an array of exactly three finite numbers.
 */
export default class GammaCorrection extends FabricFilter {
    constructor(options: GammaFilterOptions) {
        super();
        const { gamma } = options;
        // Validate eagerly (shape AND element type) so malformed values fail
        // at construction time rather than deep inside fabric's pipeline;
        // the original check accepted non-numeric or NaN entries silently.
        if (!Array.isArray(gamma) || gamma.length !== 3 ||
            gamma.some((channel) => typeof channel !== 'number' || !Number.isFinite(channel))) {
            throw new Error(`Incorrect option for gamma filter, expected array: [R, G, B] got ${gamma}`);
        }
        // @ts-ignore: Some filters are not typed yet https://github.com/DefinitelyTyped/DefinitelyTyped/issues/62371
        this.filter = new fabric.Image.filters.Gamma({
            gamma,
        });
    }
}
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
import { fabric } from 'fabric';
export type ConfigurableFilterType = fabric.IBaseFilter;
/**
 * Contract for an image filter applied to decoded frame data.
 * `currentProcessedImage` caches the number of the last frame the filter
 * was applied to, so callers can skip redundant reprocessing.
 */
export interface ImageProcessing {
    filter: ConfigurableFilterType | null;
    currentProcessedImage: number | null;
    processImage: (src: ImageData, frameNumber: number) => ImageData;
    configure: (options: object) => void;
}

/* eslint @typescript-eslint/no-unused-vars: ["error", { "argsIgnorePattern": "^_" }] */
/**
 * Default (abstract-like) filter implementation: holds the shared state
 * and forces subclasses to provide `processImage`.
 */
export class BaseImageFilter implements ImageProcessing {
    public filter: fabric.IBaseFilter | null = null;
    public currentProcessedImage: number | null = null;

    processImage(_src: ImageData, _frame: number): ImageData {
        // Must be overridden by a concrete filter.
        throw new Error('Process image is not implemented');
    }

    configure(_opts: object): void {
        // Intentionally a no-op; subclasses override when configurable.
    }
}
/** A registered filter: the processing implementation plus its unique alias. */
export interface ImageFilter {
    modifier: ImageProcessing,
    alias: ImageFilterAlias
}

/** Unique keys identifying each supported image filter. */
export enum ImageFilterAlias {
    HISTOGRAM_EQUALIZATION = 'opencv.histogramEqualizaton',
    GAMMA_CORRECTION = 'fabric.gammaCorrection',
}

/**
 * Looks up a filter by alias.
 *
 * @returns the matching filter, or `null` when none is registered.
 */
export function hasFilter(filters: ImageFilter[], alias: ImageFilterAlias): ImageFilter | null {
    return filters.find((imageFilter) => imageFilter.alias === alias) ?? null;
}
// Copyright (C) 2021-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
import { ImageProcessing } from './opencv-interfaces';
import { BaseImageFilter, ImageProcessing } from 'utils/image-processing';
export interface HistogramEqualization extends ImageProcessing{
processImage: (src:ImageData, frameNumber: number)=>ImageData;
processImage: (src: ImageData, frameNumber: number) => ImageData;
}
interface HashedImage{
frameNumber: number,
frameData: ImageData,
timestamp: number,
}
export default class HistogramEqualizationImplementation implements HistogramEqualization {
private readonly bufferSize: number = 20;
export default class HistogramEqualizationImplementation extends BaseImageFilter {
private cv:any;
private histHash: HashedImage[];
public currentProcessedImage: number | undefined;
constructor(cv:any) {
super();
this.cv = cv;
this.histHash = [];
}
public processImage(src:ImageData, frameNumber: number) : ImageData {
const hashedFrame = this.hashedFrame(frameNumber);
if (!hashedFrame) {
const { cv } = this;
let matImage = null;
const RGBImage = new cv.Mat();
const YUVImage = new cv.Mat();
const RGBDist = new cv.Mat();
const YUVDist = new cv.Mat();
const RGBADist = new cv.Mat();
let channels = new cv.MatVector();
const equalizedY = new cv.Mat();
try {
this.currentProcessedImage = frameNumber;
matImage = cv.matFromImageData(src);
cv.cvtColor(matImage, RGBImage, cv.COLOR_RGBA2RGB, 0);
cv.cvtColor(RGBImage, YUVImage, cv.COLOR_RGB2YUV, 0);
cv.split(YUVImage, channels);
const [Y, U, V] = [channels.get(0), channels.get(1), channels.get(2)];
channels.delete();
channels = null;
cv.equalizeHist(Y, equalizedY);
Y.delete();
channels = new cv.MatVector();
channels.push_back(equalizedY); equalizedY.delete();
channels.push_back(U); U.delete();
channels.push_back(V); V.delete();
cv.merge(channels, YUVDist);
cv.cvtColor(YUVDist, RGBDist, cv.COLOR_YUV2RGB, 0);
cv.cvtColor(RGBDist, RGBADist, cv.COLOR_RGB2RGBA, 0);
const arr = new Uint8ClampedArray(RGBADist.data, RGBADist.cols, RGBADist.rows);
const imgData = new ImageData(arr, src.width, src.height);
this.hashFrame(imgData, frameNumber);
return imgData;
} catch (e) {
throw new Error(e.toString());
} finally {
if (matImage) matImage.delete();
if (channels) channels.delete();
RGBImage.delete();
YUVImage.delete();
RGBDist.delete();
YUVDist.delete();
RGBADist.delete();
}
} else {
const { cv } = this;
let matImage = null;
const RGBImage = new cv.Mat();
const YUVImage = new cv.Mat();
const RGBDist = new cv.Mat();
const YUVDist = new cv.Mat();
const RGBADist = new cv.Mat();
let channels = new cv.MatVector();
const equalizedY = new cv.Mat();
try {
this.currentProcessedImage = frameNumber;
return hashedFrame;
}
}
private hashedFrame(frameNumber: number): ImageData | null {
const hashed = this.histHash.find((_hashed) => _hashed.frameNumber === frameNumber);
if (hashed) {
hashed.timestamp = Date.now();
}
return hashed?.frameData || null;
}
private hashFrame(frameData: ImageData, frameNumber: number): void {
if (this.histHash.length >= this.bufferSize) {
const leastRecentlyUsed = this.histHash[0];
const currentTimestamp = Date.now();
let diff = currentTimestamp - leastRecentlyUsed.timestamp;
let leastIndex = 0;
for (let i = 1; i < this.histHash.length; i++) {
const currentDiff = currentTimestamp - this.histHash[i].timestamp;
if (currentDiff > diff) {
diff = currentDiff;
leastIndex = i;
}
}
this.histHash.splice(leastIndex, 1);
matImage = cv.matFromImageData(src);
cv.cvtColor(matImage, RGBImage, cv.COLOR_RGBA2RGB, 0);
cv.cvtColor(RGBImage, YUVImage, cv.COLOR_RGB2YUV, 0);
cv.split(YUVImage, channels);
const [Y, U, V] = [channels.get(0), channels.get(1), channels.get(2)];
channels.delete();
channels = null;
cv.equalizeHist(Y, equalizedY);
Y.delete();
channels = new cv.MatVector();
channels.push_back(equalizedY); equalizedY.delete();
channels.push_back(U); U.delete();
channels.push_back(V); V.delete();
cv.merge(channels, YUVDist);
cv.cvtColor(YUVDist, RGBDist, cv.COLOR_YUV2RGB, 0);
cv.cvtColor(RGBDist, RGBADist, cv.COLOR_RGB2RGBA, 0);
const arr = new Uint8ClampedArray(RGBADist.data, RGBADist.cols, RGBADist.rows);
const imgData = new ImageData(arr, src.width, src.height);
return imgData;
} catch (e) {
throw new Error(e.toString());
} finally {
if (matImage) matImage.delete();
if (channels) channels.delete();
RGBImage.delete();
YUVImage.delete();
RGBDist.delete();
YUVDist.delete();
RGBADist.delete();
}
this.histHash.push({
frameData,
frameNumber,
timestamp: Date.now(),
});
}
}
// Copyright (C) 2021-2022 Intel Corporation
// Copyright (C) 2023 CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
export interface ImageProcessing {
processImage: (src: ImageData, frameNumber: number) => ImageData;
currentProcessedImage: number | undefined;
}
export interface TrackingResult {
updated: boolean;
points: number[];
......
......@@ -126,5 +126,8 @@
"@types/react-router-dom": "^5.3.3",
"@types/prettier": "2.4.1"
},
"dependencies": {}
"dependencies": {
"@types/fabric": "^4.5.7",
"fabric": "^5.2.1"
}
}
......@@ -173,7 +173,7 @@ context('OpenCV. Intelligent scissors. Histogram Equalization. TrackerMIL.', ()
.should('have.class', 'ant-tabs-tab-active');
cy.get('.cvat-opencv-image-tool').click();
cy.get('.cvat-opencv-image-tool').should('have.class', 'cvat-opencv-image-tool-active');
cy.get('.cvat-notification-notice-opencv-processing-error').should('not.exist');
cy.get('.cvat-notification-notice-image-processing-error').should('not.exist');
cy.get('.cvat-opencv-image-tool').click();
cy.get('.cvat-opencv-image-tool').should('not.have.class', 'cvat-opencv-image-tool-active');
cy.get('.cvat-opencv-image-tool').trigger('mouseleave').trigger('mouseout');
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册