diff --git a/CHANGELOG.md b/CHANGELOG.md index 1582131ecb1efa23fbe9174f3a1e62246b7dff26..8e66af34fc1390b07a8773019f93f4163fe8e315 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Ability to export/import tasks () - Add a tutorial for semi-automatic/automatic annotation () - Explicit "Done" button when drawing any polyshapes () +- Histogram equalization with OpenCV javascript () ### Changed diff --git a/cvat-canvas/src/typescript/canvasModel.ts b/cvat-canvas/src/typescript/canvasModel.ts index 1d6b009410b0b512117c6902a11e0b3dda32f2c0..0da3bce42a91517de8aa0a0ecf2210f44931d706 100644 --- a/cvat-canvas/src/typescript/canvasModel.ts +++ b/cvat-canvas/src/typescript/canvasModel.ts @@ -58,6 +58,7 @@ export interface Configuration { showProjections?: boolean; forceDisableEditing?: boolean; intelligentPolygonCrop?: boolean; + forceFrameUpdate?: boolean; } export interface DrawData { @@ -392,8 +393,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { throw Error(`Canvas is busy. Action: ${this.data.mode}`); } } - - if (frameData.number === this.data.imageID) { + if (frameData.number === this.data.imageID && !this.data.configuration.forceFrameUpdate) { this.data.zLayer = zLayer; this.data.objects = objectStates; this.notify(UpdateReasons.OBJECTS_UPDATED); @@ -652,6 +652,10 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { this.data.configuration.intelligentPolygonCrop = configuration.intelligentPolygonCrop; } + if (typeof configuration.forceFrameUpdate === 'boolean') { + this.data.configuration.forceFrameUpdate = configuration.forceFrameUpdate; + } + this.notify(UpdateReasons.CONFIG_UPDATED); } diff --git a/cvat-core/src/api.js b/cvat-core/src/api.js index a5d36b6ce833b9f450141f46c9eb880ca977b44c..7067d560ab939e7b322b54a8c760665286d0563e 100644 --- a/cvat-core/src/api.js +++ b/cvat-core/src/api.js @@ -20,6 +20,7 @@ function build() { const { Project } = require('./project'); const { Attribute, Label } = require('./labels'); const MLModel = require('./ml-model'); + const { FrameData } = require('./frames'); const enums = require('./enums'); @@ -765,6 +766,7 @@ function build() { Comment, Issue, Review, + FrameData, }, }; diff --git a/cvat-core/src/frames.js b/cvat-core/src/frames.js index 478b6c6569fb87cf1541db17411b326fd4e4b3c8..fb4c4650a7e1ca844ec59262592feaa86486a295 100644 --- a/cvat-core/src/frames.js +++ b/cvat-core/src/frames.js @@ -124,6 +124,14 @@ const result = await PluginRegistry.apiWrapper.call(this, FrameData.prototype.data, onServerRequest); return result; } + + get imageData() { + return this._data.imageData; + } + + set imageData(imageData) { + this._data.imageData = imageData; + } } FrameData.prototype.data.implementation = async function (onServerRequest) { diff --git a/cvat-ui/src/actions/annotation-actions.ts b/cvat-ui/src/actions/annotation-actions.ts index 9a3c866ef36c75e54ef2aa32a4a70b574385d852..96bfb8d20dc60a88616a806a234a9a307da324a3 100644 --- a/cvat-ui/src/actions/annotation-actions.ts +++ b/cvat-ui/src/actions/annotation-actions.ts @@ -689,7 +689,8 @@ export function getPredictionsAsync(): ThunkAction { }; } -export function changeFrameAsync(toFrame: number, fillBuffer?: boolean, frameStep?: number): ThunkAction { +export function changeFrameAsync(toFrame: number, fillBuffer?: boolean, frameStep?: number, + forceUpdate?: boolean): ThunkAction { return async (dispatch: ActionCreator): Promise => { const state: CombinedState = 
getStore().getState(); const { instance: job } = state.annotation.job; @@ -700,7 +701,7 @@ export function changeFrameAsync(toFrame: number, fillBuffer?: boolean, frameSte throw Error(`Required frame ${toFrame} is out of the current job`); } - if (toFrame === frame) { + if (toFrame === frame && !forceUpdate) { dispatch({ type: AnnotationActionTypes.CHANGE_FRAME_SUCCESS, payload: { @@ -719,7 +720,6 @@ export function changeFrameAsync(toFrame: number, fillBuffer?: boolean, frameSte return; } - // Start async requests dispatch({ type: AnnotationActionTypes.CHANGE_FRAME, diff --git a/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx b/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx index f91233eaeccf452bb3f456faeaaf6b9f156468a9..f2f86766ca30e532b004ae9c336a3ef7ba3dae56 100644 --- a/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx +++ b/cvat-ui/src/components/annotation-page/standard-workspace/controls-side-bar/opencv-control.tsx @@ -6,7 +6,7 @@ import React from 'react'; import { connect } from 'react-redux'; import { Row, Col } from 'antd/lib/grid'; import Popover from 'antd/lib/popover'; -import Icon, { ScissorOutlined } from '@ant-design/icons'; +import Icon, { AreaChartOutlined, ScissorOutlined } from '@ant-design/icons'; import Text from 'antd/lib/typography/Text'; import Tabs from 'antd/lib/tabs'; import Button from 'antd/lib/button'; @@ -26,9 +26,11 @@ import { fetchAnnotationsAsync, updateAnnotationsAsync, createAnnotationsAsync, + changeFrameAsync, } from 'actions/annotation-actions'; import LabelSelector from 'components/label-selector/label-selector'; import CVATTooltip from 'components/common/cvat-tooltip'; +import { ImageProcessing } from 'utils/opencv-wrapper/opencv-interfaces'; import withVisibilityHandling from './handle-popover-visibility'; interface Props { @@ -39,6 +41,7 @@ interface Props { states: any[]; frame: number; curZOrder: number; + frameData: any; } interface DispatchToProps { @@ -46,6 +49,7 @@ interface DispatchToProps { updateAnnotations(statesToUpdate: any[]): void; createAnnotations(sessionInstance: any, frame: number, statesToCreate: any[]): void; fetchAnnotations(): void; + changeFrame(toFrame: number, fillBuffer?: boolean, frameStep?: number, forceUpdate?: boolean):void; } interface State { @@ -53,6 +57,12 @@ interface State { initializationError: boolean; initializationProgress: number; activeLabelID: number; + activeImageModifiers: ImageModifier[]; +} + +interface ImageModifier { + modifier: ImageProcessing, + alias: string } const core = getCore(); @@ -68,7 +78,7 @@ function mapStateToProps(state: CombinedState): Props { job: { instance: jobInstance, labels }, canvas: { activeControl, instance: canvasInstance }, player: { - frame: { number: frame }, + frame: { number: frame, data: frameData }, }, }, } = state; @@ -81,6 +91,7 @@ function mapStateToProps(state: CombinedState): Props { labels, states, frame, + frameData, }; } @@ -89,26 +100,32 @@ const mapDispatchToProps = { updateAnnotations: updateAnnotationsAsync, fetchAnnotations: fetchAnnotationsAsync, createAnnotations: createAnnotationsAsync, + changeFrame: changeFrameAsync, }; class OpenCVControlComponent extends React.PureComponent { private activeTool: IntelligentScissors | null; + private canvasForceUpdateWasEnabled: boolean; public constructor(props: Props & DispatchToProps) { super(props); const { labels } = props; this.activeTool = null; + 
this.canvasForceUpdateWasEnabled = false; + this.state = { libraryInitialized: openCVWrapper.isInitialized, initializationError: false, initializationProgress: -1, activeLabelID: labels.length ? labels[0].id : null, + activeImageModifiers: [], }; } public componentDidMount(): void { const { canvasInstance } = this.props; canvasInstance.html().addEventListener('canvas.interacted', this.interactionListener); + canvasInstance.html().addEventListener('canvas.setup', this.runImageModifier); } public componentDidUpdate(prevProps: Props): void { @@ -124,6 +141,7 @@ class OpenCVControlComponent extends React.PureComponent => { @@ -173,6 +191,42 @@ class OpenCVControlComponent extends React.PureComponent => { + const { activeImageModifiers } = this.state; + const { + frameData, states, curZOrder, canvasInstance, frame, + } = this.props; + try { + if (activeImageModifiers.length !== 0 && activeImageModifiers[0].modifier.currentProcessedImage !== frame) { + this.enableCanvasForceUpdate(); + const canvas: HTMLCanvasElement | undefined = window.document.getElementById('cvat_canvas_background') as + | HTMLCanvasElement + | undefined; + if (!canvas) { + throw new Error('Element #cvat_canvas_background was not found'); + } + const { width, height } = canvas; + const context = canvas.getContext('2d'); + if (!context) { + throw new Error('Canvas context is empty'); + } + const imageData = context.getImageData(0, 0, width, height); + const newImageData = activeImageModifiers.reduce((oldImageData, activeImageModifier) => + activeImageModifier.modifier.processImage(oldImageData, frame), imageData); + const imageBitmap = await createImageBitmap(newImageData); + frameData.imageData = imageBitmap; + canvasInstance.setup(frameData, states, curZOrder); + } + } catch (error) { + notification.error({ + description: error.toString(), + message: 'OpenCV.js processing error occured', + }); + } finally { + this.disableCanvasForceUpdate(); + } + }; + private async runCVAlgorithm(pressedPoints: number[], threshold: number): Promise { // Getting image data const canvas: HTMLCanvasElement | undefined = window.document.getElementById('cvat_canvas_background') as @@ -215,6 +269,45 @@ class OpenCVControlComponent extends React.PureComponent imageModifier.alias === alias)?.modifier || null; + } + + private disableImageModifier(alias: string):void { + const { activeImageModifiers } = this.state; + const index = activeImageModifiers.findIndex((imageModifier) => imageModifier.alias === alias); + if (index !== -1) { + activeImageModifiers.splice(index, 1); + this.setState({ + activeImageModifiers: [...activeImageModifiers], + }); + } + } + + private enableImageModifier(modifier: ImageProcessing, alias: string): void{ + this.setState((prev: State) => ({ + ...prev, + activeImageModifiers: [...prev.activeImageModifiers, { modifier, alias }], + }), () => { + this.runImageModifier(); + }); + } + + private enableCanvasForceUpdate():void{ + const { canvasInstance } = this.props; + canvasInstance.configure({ forceFrameUpdate: true }); + this.canvasForceUpdateWasEnabled = true; + } + + private disableCanvasForceUpdate():void{ + if (this.canvasForceUpdateWasEnabled) { + const { canvasInstance } = this.props; + canvasInstance.configure({ forceFrameUpdate: false }); + this.canvasForceUpdateWasEnabled = false; + } + } + private renderDrawingContent(): JSX.Element { const { activeLabelID } = this.state; const { labels, canvasInstance, onInteractionStart } = this.props; @@ -254,6 +347,36 @@ class OpenCVControlComponent extends React.PureComponent 
+ + + + + + + ); + } + private renderContent(): JSX.Element { const { libraryInitialized, initializationProgress, initializationError } = this.state; @@ -271,7 +394,9 @@ class OpenCVControlComponent extends React.PureComponent {this.renderDrawingContent()} - + + {this.renderImageContent()} + ) : ( <> diff --git a/cvat-ui/src/components/annotation-page/standard-workspace/styles.scss b/cvat-ui/src/components/annotation-page/standard-workspace/styles.scss index ccc6962f836bd70c0556ac08856c91bea75d5d96..6897e2f1cb87d7db070eed2be0d2d77aab166f04 100644 --- a/cvat-ui/src/components/annotation-page/standard-workspace/styles.scss +++ b/cvat-ui/src/components/annotation-page/standard-workspace/styles.scss @@ -228,6 +228,15 @@ } } +.cvat-opencv-image-tool { + @extend .cvat-opencv-drawing-tool; +} + +.cvat-opencv-image-tool-active { + color: #40a9ff; + border-color: #40a9ff; +} + .cvat-setup-tag-popover-content, .cvat-draw-shape-popover-content { padding: $grid-unit-size; diff --git a/cvat-ui/src/utils/opencv-wrapper/histogram-equalization.ts b/cvat-ui/src/utils/opencv-wrapper/histogram-equalization.ts new file mode 100644 index 0000000000000000000000000000000000000000..7bf3396678006f11cc56cef0701fef74e49e3e30 --- /dev/null +++ b/cvat-ui/src/utils/opencv-wrapper/histogram-equalization.ts @@ -0,0 +1,109 @@ +// Copyright (C) 2021 Intel Corporation +// +// SPDX-License-Identifier: MIT + +import { ImageProcessing } from './opencv-interfaces'; + +export interface HistogramEqualization extends ImageProcessing{ + processImage: (src:ImageData, frameNumber: number)=>ImageData; +} + +interface HashedImage{ + frameNumber: number, + frameData: ImageData, + timestamp: number, +} + +export default class HistogramEqualizationImplementation implements HistogramEqualization { + private readonly bufferSize: number = 20; + private cv:any; + private histHash: HashedImage[]; + public currentProcessedImage: number | undefined; + + constructor(cv:any) { + this.cv = cv; + this.histHash = []; + } + + public processImage(src:ImageData, frameNumber: number) : ImageData { + const hashedFrame = this.hashedFrame(frameNumber); + if (!hashedFrame) { + const { cv } = this; + let matImage = null; + const RGBImage = new cv.Mat(); + const YUVImage = new cv.Mat(); + const RGBDist = new cv.Mat(); + const YUVDist = new cv.Mat(); + const RGBADist = new cv.Mat(); + let channels = new cv.MatVector(); + const equalizedY = new cv.Mat(); + try { + this.currentProcessedImage = frameNumber; + matImage = cv.matFromImageData(src); + cv.cvtColor(matImage, RGBImage, cv.COLOR_RGBA2RGB, 0); + cv.cvtColor(RGBImage, YUVImage, cv.COLOR_RGB2YUV, 0); + cv.split(YUVImage, channels); + const [Y, U, V] = [channels.get(0), channels.get(1), channels.get(2)]; + channels.delete(); + channels = null; + cv.equalizeHist(Y, equalizedY); + Y.delete(); + channels = new cv.MatVector(); + channels.push_back(equalizedY); equalizedY.delete(); + channels.push_back(U); U.delete(); + channels.push_back(V); V.delete(); + cv.merge(channels, YUVDist); + cv.cvtColor(YUVDist, RGBDist, cv.COLOR_YUV2RGB, 0); + cv.cvtColor(RGBDist, RGBADist, cv.COLOR_RGB2RGBA, 0); + const arr = new Uint8ClampedArray(RGBADist.data, RGBADist.cols, RGBADist.rows); + const imgData = new ImageData(arr, src.width, src.height); + this.hashFrame(imgData, frameNumber); + return imgData; + } catch (e) { + console.log('Histogram equalization error', e); + return src; + } finally { + if (matImage) matImage.delete(); + if (channels) channels.delete(); + RGBImage.delete(); + YUVImage.delete(); + RGBDist.delete(); + 
YUVDist.delete(); + RGBADist.delete(); + } + } else { + this.currentProcessedImage = frameNumber; + return hashedFrame; + } + } + + private hashedFrame(frameNumber: number): ImageData|null { + const hashed = this.histHash.find((_hashed) => _hashed.frameNumber === frameNumber); + if (hashed) { + hashed.timestamp = Date.now(); + } + return hashed?.frameData || null; + } + + private hashFrame(frameData:ImageData, frameNumber:number):void{ + if (this.histHash.length >= this.bufferSize) { + const leastRecentlyUsed = this.histHash[0]; + const currentTimestamp = Date.now(); + let diff = currentTimestamp - leastRecentlyUsed.timestamp; + let leastIndex = 0; + for (let i = 1; i < this.histHash.length; i++) { + const currentDiff = currentTimestamp - this.histHash[i].timestamp; + if (currentDiff > diff) { + diff = currentDiff; + leastIndex = i; + } + } + this.histHash.splice(leastIndex, 1); + } + this.histHash.push({ + frameData, + frameNumber, + timestamp: Date.now(), + }); + } +} diff --git a/cvat-ui/src/utils/opencv-wrapper/opencv-interfaces.ts b/cvat-ui/src/utils/opencv-wrapper/opencv-interfaces.ts new file mode 100644 index 0000000000000000000000000000000000000000..fd82fb2829025b902a3333e8030a6d45fb6ddb3d --- /dev/null +++ b/cvat-ui/src/utils/opencv-wrapper/opencv-interfaces.ts @@ -0,0 +1,8 @@ +// Copyright (C) 2021 Intel Corporation +// +// SPDX-License-Identifier: MIT + +export interface ImageProcessing { + processImage: (src: ImageData, frameNumber: number) => ImageData; + currentProcessedImage: number|undefined +} diff --git a/cvat-ui/src/utils/opencv-wrapper/opencv-wrapper.ts b/cvat-ui/src/utils/opencv-wrapper/opencv-wrapper.ts index 74394a254009e2264df3c17aeedb2d56b1bc8491..d337e1e6064f3be92bfa1fe07aedd8c98b652927 100644 --- a/cvat-ui/src/utils/opencv-wrapper/opencv-wrapper.ts +++ b/cvat-ui/src/utils/opencv-wrapper/opencv-wrapper.ts @@ -4,6 +4,7 @@ import getCore from 'cvat-core-wrapper'; import waitFor from '../wait-for'; +import HistogramEqualizationImplementation, { HistogramEqualization } from './histogram-equalization'; import IntelligentScissorsImplementation, { IntelligentScissors } from './intelligent-scissors'; @@ -14,6 +15,10 @@ export interface Segmentation { intelligentScissorsFactory: () => IntelligentScissors; } +export interface ImgProc { + hist: () => HistogramEqualization +} + export class OpenCVWrapper { private initialized: boolean; private cv: any; @@ -89,6 +94,15 @@ export class OpenCVWrapper { intelligentScissorsFactory: () => new IntelligentScissorsImplementation(this.cv), }; } + + public get imgproc(): ImgProc { + if (!this.initialized) { + throw new Error('Need to initialize OpenCV first'); + } + return { + hist: () => new HistogramEqualizationImplementation(this.cv), + }; + } } export default new OpenCVWrapper(); diff --git a/site/content/en/docs/manual/advanced/opencv-tools.md b/site/content/en/docs/manual/advanced/opencv-tools.md index ab68f0a17591d7478236c52d03e6e4c16d0ad100..2ccc4ae35026dbd7313d8457a8b9fea34d76e394 100644 --- a/site/content/en/docs/manual/advanced/opencv-tools.md +++ b/site/content/en/docs/manual/advanced/opencv-tools.md @@ -38,3 +38,21 @@ displayed as a red square which is tied to the cursor. - Once all the points are placed, you can complete the creation of the object by clicking on the icon or clicking `N`. As a result, a polygon will be created (read more about the polygons in the [annotation with polygons](/docs/manual/advanced/annotation-with-polygons/)). 
+ +### Histogram Equalization + +Histogram equalization is a computer vision method that improves the contrast of an image by stretching out its intensity range. +It usually increases the global contrast of images whose usable data +is concentrated in a narrow range of intensity values. +It is useful for images whose background and foreground are both bright or both dark. + +- First, select the image tab and then click the `histogram equalization` button. + + ![](/images/image221.jpg) + +- The contrast of the current frame will then be improved. + If you change the frame, it will be equalized as well. + You can disable equalization by clicking the `histogram equalization` button again. + + ![](/images/image222.jpg) + diff --git a/site/content/en/images/image221.jpg b/site/content/en/images/image221.jpg new file mode 100644 index 0000000000000000000000000000000000000000..eb1bf352eac12215a483d31028b16567595bc46c Binary files /dev/null and b/site/content/en/images/image221.jpg differ diff --git a/site/content/en/images/image222.jpg b/site/content/en/images/image222.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c7ccb7920f6aa728045acb458273741d0104ac74 Binary files /dev/null and b/site/content/en/images/image222.jpg differ
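
The documentation above describes the effect of histogram equalization but not the mechanics. As a minimal, self-contained illustration (not part of the patch), the sketch below shows what `cv.equalizeHist` effectively does to the Y (luminance) channel inside `HistogramEqualizationImplementation`: build a 256-bin histogram, accumulate it into a CDF, and remap every pixel so the occupied intensities are spread across the full 0–255 range.

```ts
// Illustrative TypeScript-only sketch of histogram equalization on a single
// 8-bit channel; the patch itself delegates this work to cv.equalizeHist.
function equalizeChannel(channel: Uint8ClampedArray): Uint8ClampedArray {
    // 256-bin histogram of intensity values
    const histogram = new Array<number>(256).fill(0);
    for (const value of channel) histogram[value] += 1;

    // Cumulative distribution function over the bins
    const cdf = new Array<number>(256).fill(0);
    let cumulative = 0;
    for (let i = 0; i < 256; i++) {
        cumulative += histogram[i];
        cdf[i] = cumulative;
    }

    // Remap intensities so the occupied range is stretched to 0..255
    const cdfMin = cdf.find((value) => value > 0) || 0;
    const denominator = Math.max(channel.length - cdfMin, 1);
    const result = new Uint8ClampedArray(channel.length);
    for (let i = 0; i < channel.length; i++) {
        result[i] = Math.round(((cdf[channel[i]] - cdfMin) / denominator) * 255);
    }
    return result;
}
```

Equalizing only the luminance channel is why the patch converts RGBA → RGB → YUV, equalizes Y, and converts back: hue and saturation are left untouched.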
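
For the end-to-end flow, here is a condensed sketch of how the pieces introduced in this patch cooperate: the `imgproc.hist()` factory from the OpenCV wrapper, the new `FrameData.imageData` setter, and the `forceFrameUpdate` canvas flag. It mirrors `runImageModifier` in `opencv-control.tsx`; the standalone function, its parameters, and the loose `any` types are illustrative only, not new API.

```ts
import openCVWrapper from 'utils/opencv-wrapper/opencv-wrapper';

// Sketch: equalize the currently rendered frame and push it back to the canvas.
// Assumes openCVWrapper.initialize() has already completed and that
// canvasInstance, frameData, states and zLayer come from the annotation store.
async function applyHistogramEqualization(
    canvasInstance: any, frameData: any, states: any[], zLayer: number, frame: number,
): Promise<void> {
    const equalizer = openCVWrapper.imgproc.hist();

    // Read the pixels of the frame that is currently drawn on the background canvas
    const background = window.document.getElementById('cvat_canvas_background') as HTMLCanvasElement;
    const context = background.getContext('2d');
    if (!context) throw new Error('Canvas context is empty');
    const imageData = context.getImageData(0, 0, background.width, background.height);

    // Run the modifier; results for recent frames are cached inside the implementation
    const equalized = equalizer.processImage(imageData, frame);

    // Attach the processed bitmap to the frame and redraw. forceFrameUpdate makes
    // canvas.setup() refresh the image even though the frame number is unchanged.
    frameData.imageData = await createImageBitmap(equalized);
    canvasInstance.configure({ forceFrameUpdate: true });
    canvasInstance.setup(frameData, states, zLayer);
    canvasInstance.configure({ forceFrameUpdate: false });
}
```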