/usr/include/OTB-5.8/otbSimplifyPathFunctor.h is in libotb-dev 5.8.0+dfsg-3.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*=========================================================================
  Program:   ORFEO Toolbox
  Language:  C++
  Date:      $Date$
  Version:   $Revision$

  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
  See OTBCopyright.txt for details.

  This software is distributed WITHOUT ANY WARRANTY; without even
  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
  PURPOSE.  See the above copyright notices for more information.
=========================================================================*/
#ifndef otbSimplifyPathFunctor_h
#define otbSimplifyPathFunctor_h
#include "otbMath.h"
namespace otb
{
/** \class SimplifyPathFunctor
 * \brief This filter performs a simplification of the input path.
 *
 * It reduces the number of vertices of each path according to a tolerance criterion, aiming to
 * remove aligned vertices while keeping sharp angular points.
 *
 * To ensure a unique output, each path is considered first from its beginning to its end, then
 * from its beginning to the vertex just before the end, and so on. At each step, the consistency
 * of the candidate segment is checked: the line passing through its first and last vertices is
 * computed, and the Euclidean distance from each intermediate vertex to this line is evaluated.
 * If this distance exceeds the tolerance threshold for any vertex, the segment is considered
 * inconsistent and no vertex can be removed.
 *
 * If the segment is consistent (which always happens once it has been reduced to two vertices),
 * only its beginning and ending vertices are kept, and a new search iteration starts at its end.
 *
 * This filter is part of the road extraction framework.
 *
 * \sa UnaryFunctorObjectListFilter
 *
 * \ingroup Functor
 *
 * \ingroup OTBVectorDataManipulation
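 *
 * Example (a minimal usage sketch, not part of the original documentation; it assumes TInput and
 * TOutput are itk::PolyLineParametricPath<2>, but any path type exposing the same vertex-list
 * interface can be used):
 *
 * \code
 * // requires "itkPolyLineParametricPath.h" and "otbSimplifyPathFunctor.h"
 * typedef itk::PolyLineParametricPath<2>               PathType;
 * typedef otb::SimplifyPathFunctor<PathType, PathType> FunctorType;
 *
 * PathType::Pointer path = PathType::New();
 * PathType::ContinuousIndexType vertex;
 * vertex[0] = 0.0;  vertex[1] = 0.0; path->AddVertex(vertex);
 * vertex[0] = 5.0;  vertex[1] = 0.1; path->AddVertex(vertex); // nearly aligned with its neighbours
 * vertex[0] = 10.0; vertex[1] = 0.0; path->AddVertex(vertex);
 *
 * FunctorType functor;
 * functor.SetTolerance(1.0); // compared against the squared distance to the line
 * PathType::Pointer simplified = functor(path.GetPointer());
 * // simplified now holds only the first and last vertices
 * \endcode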
*/
template <class TInput, class TOutput>
class SimplifyPathFunctor
{
public:
  typedef typename TInput::VertexListType::ConstIterator VertexListConstIteratorType;
  typedef typename TInput::VertexListType::ConstPointer  VertexListConstPointerType;
  typedef TOutput                                         OutputPathType;
  typedef typename OutputPathType::Pointer                OutputPathPointerType;

  void SetTolerance(double Tolerance)
  {
    m_Tolerance = Tolerance;
  }
  double GetTolerance(void) const
  {
    return (m_Tolerance);
  }

  SimplifyPathFunctor()
  {
    m_Tolerance = 1.0;
  }
  ~SimplifyPathFunctor() {}
  inline OutputPathPointerType operator ()(const TInput * input)
  {
    OutputPathPointerType newPath = OutputPathType::New();
    newPath->Initialize();
    // Get the vertex list of the current input path
    VertexListConstPointerType vertexList = input->GetVertexList();
    if (vertexList->Size() > 0)
      {
      VertexListConstIteratorType beginIt = vertexList->Begin();
      VertexListConstIteratorType beforeTheEndIt = --(vertexList->End());

      // Add the first vertex
      newPath->AddVertex(beginIt.Value());

      while (beginIt != beforeTheEndIt)
        {
        // Start from the last vertex and, while the segment [beginIt, endIt]
        // is not consistent, move endIt one vertex backwards
        VertexListConstIteratorType endIt = beforeTheEndIt;
        while (!this->TestPathConsistency(beginIt, endIt))
          {
          --endIt;
          }
        // Keep only the end vertex of the consistent segment and restart from it
        newPath->AddVertex(endIt.Value());
        beginIt = endIt;
        }
      }
    newPath->SetMetaDataDictionary(input->GetMetaDataDictionary());
    return newPath;
  }
private:
  double m_Tolerance;

  bool TestPathConsistency(VertexListConstIteratorType begin,
                           VertexListConstIteratorType end) const
  {
    VertexListConstIteratorType segmentIt = begin;
    ++segmentIt;
    // Compute the distance of each intermediate vertex to the line (begin, end)
    // using the cross product: |cross|^2 / |end - begin|^2 is the squared
    // perpendicular distance, which is compared directly to m_Tolerance.
    while (segmentIt != end)
      {
      double crossProduct = (end.Value()[0] - begin.Value()[0]) * (segmentIt.Value()[1] - begin.Value()[1])
                            - (end.Value()[1] - begin.Value()[1]) * (segmentIt.Value()[0] - begin.Value()[0]);
      double lengthSeg = (end.Value()[0] - begin.Value()[0]) * (end.Value()[0] - begin.Value()[0])
                         + (end.Value()[1] - begin.Value()[1]) * (end.Value()[1] - begin.Value()[1]);
      // Degenerate segment (begin and end coincide): consider it inconsistent
      if (lengthSeg == 0) return false;
      double distsq = crossProduct * crossProduct / lengthSeg;
      if (distsq > static_cast<double>(m_Tolerance))
        {
        return false;
        }
      ++segmentIt;
      }
    return true;
  }
};
}
#endif