Unverified commit f35ec8c9, authored by OpenCV Pushbot, committed by GitHub

Merge pull request #21935 from Yulv-git:3.4-typos3

@@ -2290,7 +2290,7 @@ public:
  const_iterator begin() const;
  const_iterator end() const;
- //! template methods for for operation over all matrix elements.
+ //! template methods for operation over all matrix elements.
  // the operations take care of skipping gaps in the end of rows (if any)
  template<typename Functor> void forEach(const Functor& operation);
  template<typename Functor> void forEach(const Functor& operation) const;

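The forEach overloads touched in this hunk run a functor over every matrix element (skipping any row gaps); a minimal usage sketch, with the element type and lambda chosen only for illustration:

```cpp
#include <opencv2/core.hpp>

int main()
{
    cv::Mat_<float> m(4, 4, 0.f);
    // The functor receives a reference to each element and its position as (row, col).
    m.forEach([](float &value, const int *pos) {
        value = static_cast<float>(pos[0] * 10 + pos[1]);
    });
    return 0;
}
```
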
@@ -318,7 +318,7 @@ CV_EXPORTS Ptr<Filter> createColumnSumFilter(int srcType, int dstType, int ksize
  /** @brief Performs median filtering for each point of the source image.
- @param srcType type of of source image. Only CV_8UC1 images are supported for now.
+ @param srcType type of source image. Only CV_8UC1 images are supported for now.
  @param windowSize Size of the kernerl used for the filtering. Uses a (windowSize x windowSize) filter.
  @param partition Specifies the parallel granularity of the workload. This parameter should be used GPU experts when optimizing performance.

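For readers of this doc comment, the filter it describes is created and applied roughly as follows (a sketch assuming OpenCV is built with the cudafilters module; the file name and window size are placeholders):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/cudafilters.hpp>

int main()
{
    cv::Mat src = cv::imread("input.png", cv::IMREAD_GRAYSCALE); // CV_8UC1 input
    cv::cuda::GpuMat d_src(src), d_dst;
    // 5x5 median filter; the partition parameter keeps its default value.
    cv::Ptr<cv::cuda::Filter> median = cv::cuda::createMedianFilter(CV_8UC1, 5);
    median->apply(d_src, d_dst);
    cv::Mat dst;
    d_dst.download(dst);
    return 0;
}
```
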
@@ -235,7 +235,7 @@ namespace cv { namespace cuda { namespace device
  }
  __syncthreads();
- // For all remaining rows in the median filter, add the values to the the histogram
+ // For all remaining rows in the median filter, add the values to the histogram
  for (int j=threadIdx.x; j<cols; j+=blockDim.x){
  for(int i=initStartRow; i<initStopRow; i++){
  int pos=::min(i,rows-1);

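The kernel in this hunk accumulates window pixels into histograms and later reads the median off the bin counts; a small CPU-side sketch of that extraction idea (hypothetical helper, not the kernel's actual code):

```cpp
// Return the median of the pixels summarized by a 256-bin histogram.
// windowArea is the number of pixels counted into hist.
static int medianFromHistogram(const int hist[256], int windowArea)
{
    int half = (windowArea + 1) / 2;
    int count = 0;
    for (int v = 0; v < 256; ++v)
    {
        count += hist[v];
        if (count >= half)
            return v; // first bin where the cumulative count crosses the midpoint
    }
    return 255;
}
```
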
@@ -791,7 +791,7 @@ namespace cv {
  if (layers_vec.size() > 1)
  {
  // layer ids in layers_vec - inputs of Slice layers
- // after adding offset to layers_vec: layer ids - ouputs of Slice layers
+ // after adding offset to layers_vec: layer ids - outputs of Slice layers
  for (size_t k = 0; k < layers_vec.size(); ++k)
  layers_vec[k] += layers_vec.size();

@@ -61,7 +61,7 @@ enum Version {
  // The version field is always serialized and we will use it to store the
  // version that the graph is generated from. This helps us set up version
  // control.
- // For the IR, we are using simple numbers starting with with 0x00000001,
+ // For the IR, we are using simple numbers starting with 0x00000001,
  // which was the version we published on Oct 10, 2017.
  IR_VERSION_2017_10_10 = 0x0000000000000001;

@@ -1537,7 +1537,7 @@ CV_EXPORTS_W void boxFilter( InputArray src, OutputArray dst, int ddepth,
  For every pixel \f$ (x, y) \f$ in the source image, the function calculates the sum of squares of those neighboring
  pixel values which overlap the filter placed over the pixel \f$ (x, y) \f$.
- The unnormalized square box filter can be useful in computing local image statistics such as the the local
+ The unnormalized square box filter can be useful in computing local image statistics such as the local
  variance and standard deviation around the neighborhood of a pixel.
  @param src input image

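As the documentation notes, a typical use is local variance, obtained by combining boxFilter and sqrBoxFilter; a short sketch with an arbitrary kernel size:

```cpp
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// Per-pixel local variance: E[x^2] - (E[x])^2 over a ksize x ksize neighborhood.
cv::Mat localVariance(const cv::Mat &src, int ksize = 5)
{
    cv::Mat mean, sqMean;
    cv::boxFilter(src, mean, CV_32F, cv::Size(ksize, ksize));
    cv::sqrBoxFilter(src, sqMean, CV_32F, cv::Size(ksize, ksize));
    return sqMean - mean.mul(mean);
}
```
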
@@ -43,7 +43,7 @@
  #ifdef USE_4OPT
- //Utility macros for for 1,2,4 channel images:
+ //Utility macros for 1,2,4 channel images:
  // - LOAD4/STORE4 - load/store 4-pixel groups from/to global memory
  // - SHUFFLE4_3/SHUFFLE4_5 - rearrange scattered border/central pixels into regular 4-pixel variables

@@ -1015,7 +1015,7 @@ int CV_MinCircleTest::validate_test_results( int test_case_idx )
  if( point_count >= 2 && (j < 2 || (j == 2 && cvTsDist(v[0],v[1]) < (radius-1)*2/eps)) )
  {
  ts->printf( cvtest::TS::LOG,
- "There should be at at least 3 points near the circle boundary or 2 points on the diameter\n" );
+ "There should be at least 3 points near the circle boundary or 2 points on the diameter\n" );
  code = cvtest::TS::FAIL_BAD_ACCURACY;
  goto _exit_;
  }

@@ -126,7 +126,7 @@ bool LogisticRegressionImpl::train(const Ptr<TrainData>& trainData, int)
  int num_classes = (int) this->forward_mapper.size();
  if(num_classes < 2)
  {
- CV_Error( CV_StsBadArg, "data should have atleast 2 classes" );
+ CV_Error( CV_StsBadArg, "data should have at least 2 classes" );
  }
  // add a column of ones to the data (bias/intercept term)

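The check in this hunk means train() needs at least two distinct labels; a minimal valid call looks roughly like this (toy data, CV_32F responses as the ml module expects):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/ml.hpp>

int main()
{
    // Two classes (0 and 1); a single-class label column would trigger the CV_Error above.
    cv::Mat samples = (cv::Mat_<float>(4, 1) << 1.f, 2.f, 8.f, 9.f);
    cv::Mat labels  = (cv::Mat_<float>(4, 1) << 0.f, 0.f, 1.f, 1.f);
    cv::Ptr<cv::ml::LogisticRegression> lr = cv::ml::LogisticRegression::create();
    lr->train(samples, cv::ml::ROW_SAMPLE, labels);
    return 0;
}
```
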
@@ -334,7 +334,7 @@ GStreamerCapture::~GStreamerCapture()
  /*!
  * \brief CvCapture_GStreamer::grabFrame
  * \return
- * Grabs a sample from the pipeline, awaiting consumation by retreiveFrame.
+ * Grabs a sample from the pipeline, awaiting consumation by retrieveFrame.
  * The pipeline is started if it was not running yet
  */
  bool GStreamerCapture::grabFrame()

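The grab/retrieve split documented here is what cv::VideoCapture exposes publicly; a minimal sketch (file name and backend choice are illustrative):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>

int main()
{
    cv::VideoCapture cap("video.mp4", cv::CAP_GSTREAMER);
    cv::Mat frame;
    // grab() pulls the next sample from the pipeline; retrieve() decodes it into a Mat.
    while (cap.grab() && cap.retrieve(frame))
    {
        // ... process frame ...
    }
    return 0;
}
```
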
@@ -507,21 +507,21 @@ bool IntelPerCStreamDepth::setProperty(int propIdx, double propVal)
  }
  bool IntelPerCStreamDepth::retrieveDepthAsOutputArray(cv::OutputArray image)
  {
- return retriveFrame(CV_16SC1, 0, image);
+ return retrieveFrame(CV_16SC1, 0, image);
  }
  bool IntelPerCStreamDepth::retrieveIRAsOutputArray(cv::OutputArray image)
  {
- return retriveFrame(CV_16SC1, 1, image);
+ return retrieveFrame(CV_16SC1, 1, image);
  }
  bool IntelPerCStreamDepth::retrieveUVAsOutputArray(cv::OutputArray image)
  {
- return retriveFrame(CV_32FC2, 2, image);
+ return retrieveFrame(CV_32FC2, 2, image);
  }
  bool IntelPerCStreamDepth::validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo)
  {
  return (PXCImage::COLOR_FORMAT_DEPTH == pinfo.imageInfo.format);
  }
- bool IntelPerCStreamDepth::retriveFrame(int type, int planeIdx, cv::OutputArray frame)
+ bool IntelPerCStreamDepth::retrieveFrame(int type, int planeIdx, cv::OutputArray frame)
  {
  if (!m_pxcImage.IsValid())
  return false;

@@ -85,7 +85,7 @@ public:
  protected:
  virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo);
  protected:
- bool retriveFrame(int type, int planeIdx, OutputArray frame);
+ bool retrieveFrame(int type, int planeIdx, OutputArray frame);
  };
  class VideoCapture_IntelPerC : public IVideoCapture

@@ -1858,7 +1858,7 @@ static int icvSetPropertyCAM_V4L(CvCaptureCAM_V4L* capture, int property_id, dou
  retval = icvSetControl(capture, property_id, value);
  }
- /* return the the status */
+ /* return the status */
  return retval;
  }

@@ -261,7 +261,7 @@ public:
  if (cvtest::debugLevel > 0)
  std::cout << "i = " << i << ": timestamp = " << timestamp << std::endl;
  const double frame_period = 1000.f/bunny_param.getFps();
- // NOTE: eps == frame_period, because videoCapture returns frame begining timestamp or frame end
+ // NOTE: eps == frame_period, because videoCapture returns frame beginning timestamp or frame end
  // timestamp depending on codec and back-end. So the first frame has timestamp 0 or frame_period.
  EXPECT_NEAR(timestamp, i*frame_period, frame_period) << "i=" << i;
  }