|
/// <summary>
/// Non-recursive Douglas-Peucker polyline reduction, optimized to work with
/// squared distances throughout so no square roots are taken in the inner loop.
/// </summary>
/// <param name="Points">Polyline to reduce.</param>
/// <param name="Tolerance">Maximum perpendicular deviation a removed point may have (not squared).</param>
/// <returns>The reduced polyline, or the input list unchanged when it cannot be reduced.</returns>
public static List<Point> DouglasPeuckerReduction(List<Point> Points, Double Tolerance)
{
    // Squared distances are compared against the squared tolerance.
    double toleranceSqrd = Tolerance * Tolerance;
    if (Points == null || Points.Count < 3)
        return Points;

    Int32 firstPoint = 0;
    Int32 lastPoint = Points.Count - 1;
    List<Int32> pointIndexsToKeep = new List<Int32>();

    // The first and last point are always kept.
    pointIndexsToKeep.Add(firstPoint);
    pointIndexsToKeep.Add(lastPoint);

    // The working end point must differ from the start (closed polygons have
    // coincident first/last points): back it up past any duplicates.
    while (lastPoint >= 0 && Points[firstPoint].Equals(Points[lastPoint]))
    {
        lastPoint--;
    }
    if (lastPoint == 0) { return Points; }

    // Each entry maps a segment's first index to its last index; segments
    // still in the dictionary are waiting to be examined.
    SortedDictionary<Int32, Int32> pairsIndexesToCheck = new SortedDictionary<Int32, Int32>();
    pairsIndexesToCheck.Add(firstPoint, lastPoint);

    // Safety counter so a logic error cannot loop forever. Each iteration
    // either splits a segment (at most lastPoint - 1 splits) or retires one
    // (splits + 1 removals), so 2 * lastPoint iterations always suffice.
    // BUG FIX: the original bound of lastPoint could trip on legitimate
    // heavy-keep runs and silently return the input unreduced.
    Int32 checkCount = 0;
    while (pairsIndexesToCheck.Count > 0 && checkCount < 2 * lastPoint)
    {
        Double maxDistanceSqrd = 0;
        Int32 indexFarthest = 0;
        Int32 currentFirstPoint = pairsIndexesToCheck.First().Key;
        Int32 currentLastPoint = pairsIndexesToCheck.First().Value;

        // Precompute the segment-dependent terms of the point-to-line distance
        // so the inner loop does only a multiply-add per point.
        // NOTE(review): assumes the segment end points are not coincident;
        // duplicate interior points could make the denominator zero — TODO
        // confirm upstream data cannot contain consecutive duplicates.
        double deltaX = Points[currentFirstPoint].X - Points[currentLastPoint].X;
        double deltaY = Points[currentFirstPoint].Y - Points[currentLastPoint].Y;
        Double oneOverBottomSqrd = 1 / (deltaX * deltaX + deltaY * deltaY);
        Double x1y2 = Points[currentFirstPoint].X * Points[currentLastPoint].Y;
        Double x2y1 = Points[currentFirstPoint].Y * Points[currentLastPoint].X;
        // BUG FIX: was "x1y2 - x1y2", which is always zero and broke the
        // distance computation entirely.
        Double x1y2DiffX2y1 = x1y2 - x2y1;

        for (Int32 index = currentFirstPoint + 1; index < currentLastPoint; index++)
        {
            Double distanceSqrd = PerpendicularDistance(Points[currentFirstPoint], Points[currentLastPoint], Points[index], oneOverBottomSqrd, x1y2DiffX2y1, deltaX, deltaY);
            if (distanceSqrd > maxDistanceSqrd)
            {
                maxDistanceSqrd = distanceSqrd;
                indexFarthest = index;
            }
        }

        if (maxDistanceSqrd > toleranceSqrd)
        {
            // Keep the farthest point and split the segment around it.
            pointIndexsToKeep.Add(indexFarthest);
            pairsIndexesToCheck[currentFirstPoint] = indexFarthest;
            pairsIndexesToCheck.Add(indexFarthest, currentLastPoint);
        }
        else
        {
            // Every interior point is within tolerance; segment is done.
            pairsIndexesToCheck.Remove(currentFirstPoint);
        }
        checkCount++;
    }

    // Segments left unprocessed mean the safety counter tripped: give up and
    // return the input unreduced rather than a partial result.
    if (pairsIndexesToCheck.Count > 0) { return Points; }

    // Emit the kept points in their original order.
    List<Point> returnPoints = new List<Point>();
    pointIndexsToKeep.Sort();
    foreach (Int32 index in pointIndexsToKeep)
    {
        returnPoints.Add(Points[index]);
    }
    return returnPoints;
}
/// <summary>
/// SQUARED perpendicular distance from <paramref name="Point"/> to the
/// infinite line through <paramref name="Point1"/> and <paramref name="Point2"/>.
/// Despite the name, the squared distance is returned so callers can compare
/// against a squared tolerance without taking square roots.
/// </summary>
/// <param name="Point1">Segment start (unused here; its data is folded into the precomputed terms, kept for signature compatibility).</param>
/// <param name="Point2">Segment end (unused for the same reason).</param>
/// <param name="Point">The point whose distance is measured.</param>
/// <param name="oneoverbottomsqrd">1 / (squared length of the segment), precomputed per segment.</param>
/// <param name="x1y2_diff_x2y1">Cross term x1*y2 - x2*y1, precomputed per segment.</param>
/// <param name="deltax">x1 - x2, precomputed per segment.</param>
/// <param name="deltay">y1 - y2, precomputed per segment.</param>
/// <returns>The squared perpendicular distance.</returns>
public static Double PerpendicularDistance(Point Point1, Point Point2, Point Point, Double oneoverbottomsqrd, Double x1y2_diff_x2y1, Double deltax, Double deltay)
{
    // |cross product| = twice the area of the triangle (Point1, Point2, Point).
    Double area = Math.Abs(x1y2_diff_x2y1 - Point.Y * deltax + Point.X * deltay);
    Double areasqrd = area * area;
    // heightSqrd = (2*area)^2 / baseLength^2 = squared point-to-line distance.
    Double heightsqrd = areasqrd * oneoverbottomsqrd;
    return heightsqrd;
}
|
|
|
|
|
Thanks for making a non-recursive version. You have a typo though
x1y2_diff_x2y1 = x1y2 - x1y2;
should be
x1y2_diff_x2y1 = x1y2 - x2y1;
|
|
|
|
|
/// <summary>
/// Douglas-Peucker polyline reduction (iterative). Keeps the two end points
/// plus every point whose perpendicular distance from the current chord
/// exceeds Tolerance.
/// </summary>
/// <param name="Points">Polyline to reduce.</param>
/// <param name="Tolerance">Maximum perpendicular deviation a removed point may have.</param>
/// <returns>The reduced polyline, or the input unchanged when reduction is not possible.</returns>
public static List<Point> DouglasPeuckerReduction(List<Point> Points, Double Tolerance)
{
    if (Points == null || Points.Count < 3)
        return Points;

    Int32 firstIndex = 0;
    Int32 lastIndex = Points.Count - 1;

    // The two end points are always part of the result.
    List<Int32> keepers = new List<Int32> { firstIndex, lastIndex };

    // For closed polygons the start and end coincide; walk the working end
    // index back until the two end points differ.
    while (lastIndex >= 0 && Points[firstIndex].Equals(Points[lastIndex]))
    {
        lastIndex--;
    }
    if (lastIndex == 0) { return Points; }

    // Work list of segments still to examine: key = first index, value = last.
    SortedDictionary<Int32, Int32> segments = new SortedDictionary<Int32, Int32>();
    segments.Add(firstIndex, lastIndex);

    while (segments.Count > 0)
    {
        KeyValuePair<Int32, Int32> segment = segments.First();
        Int32 segFirst = segment.Key;
        Int32 segLast = segment.Value;

        // Find the interior point farthest from the chord segFirst..segLast.
        Double maxDistance = 0;
        Int32 farthest = 0;
        for (Int32 i = segFirst + 1; i < segLast; i++)
        {
            Double d = PerpendicularDistance(Points[segFirst], Points[segLast], Points[i]);
            if (d > maxDistance)
            {
                maxDistance = d;
                farthest = i;
            }
        }

        if (maxDistance > Tolerance)
        {
            // Keep the farthest point and split the segment around it.
            keepers.Add(farthest);
            segments[segFirst] = farthest;
            segments.Add(farthest, segLast);
        }
        else
        {
            // All interior points are within tolerance; the segment is final.
            segments.Remove(segFirst);
        }
    }

    // Emit the kept points in their original order.
    keepers.Sort();
    List<Point> result = new List<Point>();
    foreach (Int32 i in keepers)
    {
        result.Add(Points[i]);
    }
    return result;
}
|
|
|
|
|
Nice concise code. Here's my VB.NET conversion to whom it may concern:
''' <summary>
''' Douglas-Peucker polyline reduction (recursive VB.NET version). Returns the
''' input unchanged when it is Nothing or has fewer than 3 points.
''' </summary>
''' <param name="points">Polyline to reduce.</param>
''' <param name="tolerance">Maximum perpendicular deviation a removed point may have.</param>
''' <returns>The reduced polyline.</returns>
Public Function GetSimplifyDouglasPeucker(points As List(Of PointD), tolerance As Double) As List(Of PointD)
    If points Is Nothing OrElse points.Count < 3 Then Return points
    Dim pi1 As Integer = 0
    Dim pi2 As Integer = points.Count - 1
    Dim nli As New List(Of Integer)
    ' The two end points are always kept.
    nli.Add(pi1)
    nli.Add(pi2)
    ' The working end point must differ from the start (closed polygons).
    ' BUG FIX: guard pi2 >= 0 so a polyline whose points are all coincident
    ' cannot drive the index below zero and throw.
    While pi2 >= 0 AndAlso points(pi1).IsCoincident(points(pi2))
        pi2 -= 1
    End While
    recurseDouglasPeuckerTrimming(points, pi1, pi2, tolerance, nli)
    ' Emit the kept points in their original order.
    Dim nl As New List(Of PointD)
    nli.Sort()
    For Each idx As Integer In nli
        nl.Add(points(idx))
    Next
    Return nl
End Function
' Recursive Douglas-Peucker step: finds the point between firstpoint and
' lastpoint farthest from the chord and, if it deviates by more than
' tolerance, keeps it and recurses into the two sub-segments it creates.
' NOTE(review): the loop below also visits firstpoint itself; its distance
' from the chord is zero, so that is harmless.
' NOTE(review): "firstPoint"/"lastPoint" in the call below differ in casing
' from the parameters -- legal in VB.NET (case-insensitive identifiers),
' but worth tidying for readability.
Private Sub recurseDouglasPeuckerTrimming(points As List(Of PointD), firstpoint As Integer, lastpoint As Integer, tolerance As Double, ByRef pIndexKeep As List(Of Integer))
Dim maxDist As Double = 0
Dim idxFarthest As Integer = 0
Dim distance As Double = 0
For i As Integer = firstpoint To lastpoint-1
distance = GetTriangleHeight(points(firstPoint),points(lastPoint),points(i))
If distance > maxDist Then
maxDist = distance
idxFarthest = i
End If
Next
' idxFarthest = 0 means no interior point was found; stop recursing then.
If maxDist > tolerance AndAlso idxFarthest <> 0 Then
pIndexKeep.Add(idxFarthest)
recurseDouglasPeuckerTrimming(points,firstpoint,idxFarthest,tolerance,pIndexKeep)
recurseDouglasPeuckerTrimming(points,idxFarthest,lastpoint,tolerance,pIndexKeep)
End If
End Sub
' Area of the triangle (base1, base2, apex) via the shoelace formula.
' NOTE(review): Double arithmetic does not normally throw, so the Catch is
' effectively dead code -- presumably defensive; confirm intent.
Public Function GetTriangleArea(base1 As PointD, base2 As PointD, apex As PointD) As Double
Try
Return Math.Abs(.5*(base1.X*base2.Y + base2.X*apex.Y + apex.X*base1.Y - base2.X*base1.Y - apex.X*base2.Y - base1.X*apex.Y))
Catch
Return Double.NaN
End Try
End Function
' Euclidean distance between base1 and base2. The apex parameter is unused
' here; it is kept so the three triangle helpers share a uniform signature.
' NOTE(review): Double arithmetic does not normally throw, so the Catch is
' effectively dead code -- presumably defensive; confirm intent.
Public Function GetTriangleBase(base1 As PointD, base2 As PointD, apex As PointD) As Double
Try
Return ((base1.X-base2.X)^2 + (base1.Y-base2.Y)^2)^0.5
Catch
Return Double.NaN
End Try
End Function
' Height of the triangle over the base1-base2 side: 2 * Area / Base, i.e. the
' perpendicular distance from apex to the line through base1 and base2.
' WARNING(review): divides by GetTriangleBase(...), which is 0 when base1 and
' base2 coincide -- the result is then Infinity or NaN rather than an
' exception. Callers should ensure the base points differ.
Public Function GetTriangleHeight(base1 As PointD, base2 As PointD, apex As PointD) As Double
Return GetTriangleArea(base1,base2,apex)/GetTriangleBase(base1,base2,apex)*2
End Function
|
|
|
|
|
There is a problem in your code.
The code:
while (Points[firstPoint].Equals(Points[lastPoint]))
{
lastPoint--;
}
Should be executed before:
pointIndexsToKeep.Add(firstPoint);
pointIndexsToKeep.Add(lastPoint);
Try with a polygon with start and end points equal. (closed polygon)
Do we agree?
|
|
|
|
|
Thanks for this comment. It saved me from having to troubleshoot that weird last point being off in some random direction issue. This fix solved the problem. Thanks for saving me a bunch of troubleshooting.
Keith Rule
|
|
|
|
|
This is pretty cool, but could you also convert the DouglasPeuckerN() variant method? Instead of taking a threshold, it takes the exact number of points you need in the output. Most of the code is identical to what you've done already.
Of course, one could TRY to build a search algorithm on top of the algorithm you converted, but it's very easy for this to fail. I've tried converting the Douglas-Peucker variant myself, but I just can't — my C++ knowledge is too limited.
PLEASE!
|
|
|
|
|
Hi,
I had the same problem, and I developed a first version that uses a maximum number of points to show.
public static List<Point> DouglasPeuckerReduction(List<Point> points, double nPoints)
{
if (points == null || points.Count < 3)
return points;
int firstIndex = 0;
int lastIndex = points.Count - 1;
List<int> pointIndexsToKeep = new List<int>();
pointIndexsToKeep.Add(firstIndex);
pointIndexsToKeep.Add(lastIndex);
while (points[firstIndex].Equals(points[lastIndex]))
lastIndex--;
List<SubPoly> polyList = new List<SubPoly>();
PolyOrderBy listOrder = new PolyOrderBy();
SubPoly subPoly = new SubPoly(firstIndex, lastIndex);
subPoly.KeyInfo = DouglasPeuckerReduction(points, firstIndex, lastIndex);
polyList.Add(subPoly);
while (polyList.Count != 0)
{
subPoly = polyList[0];
polyList.RemoveAt(0);
pointIndexsToKeep.Add(subPoly.KeyInfo.Index);
if (pointIndexsToKeep.Count == nPoints)
break;
SubPoly left = new SubPoly(subPoly.FirstIndex, subPoly.KeyInfo.Index);
left.KeyInfo = DouglasPeuckerReduction(points, left.FirstIndex, left.LastIndex);
if (left.KeyInfo.Index != 0)
polyList.Add(left);
SubPoly right = new SubPoly(subPoly.KeyInfo.Index, subPoly.LastIndex);
right.KeyInfo = DouglasPeuckerReduction(points, right.FirstIndex, right.LastIndex);
if (right.KeyInfo.Index != 0)
polyList.Add(right);
polyList.Sort(listOrder);
}
List<Point> returnPoints = new List<Point>();
pointIndexsToKeep.Sort();
foreach (int index in pointIndexsToKeep)
returnPoints.Add(points[index]);
return returnPoints;
}
/// <summary>
/// Scans the points between firstIndex (inclusive, distance trivially zero)
/// and lastIndex (exclusive) and reports the one farthest from the chord
/// firstIndex..lastIndex, together with that distance.
/// </summary>
/// <param name="points">The full point list.</param>
/// <param name="firstIndex">Index of the chord's first point.</param>
/// <param name="lastIndex">Index of the chord's last point.</param>
/// <returns>A KeyInfo holding the farthest index (0 when none found) and its distance.</returns>
private static KeyInfo DouglasPeuckerReduction(List<Point> points, int firstIndex, int lastIndex)
{
    double farthestDistance = 0;
    int farthestIndex = 0;
    for (int i = firstIndex; i < lastIndex; i++)
    {
        double d = PerpendicularDistance(points[firstIndex], points[lastIndex], points[i]);
        if (d > farthestDistance)
        {
            farthestDistance = d;
            farthestIndex = i;
        }
    }
    return new KeyInfo { Index = farthestIndex, Distance = farthestDistance };
}
// Comparer that sorts SubPoly entries by DESCENDING farthest-point distance,
// so the segment with the largest deviation is always examined first.
private class PolyOrderBy : IComparer<SubPoly>
{
public int Compare(SubPoly x, SubPoly y)
{
// Arguments reversed relative to the usual pattern => descending order.
return y.KeyInfo.Distance.CompareTo(x.KeyInfo.Distance);
}
}
// Result of scanning one segment: the index of the farthest interior point
// (0 when none was found) and its perpendicular distance from the chord.
public class KeyInfo
{
public int Index { get; set; }
public double Distance { get; set; }
}
/// <summary>
/// One segment of the polyline under consideration, bounded by the indexes
/// of its end points, plus the farthest-interior-point info found for it.
/// </summary>
public class SubPoly
{
    public int FirstIndex { get; set; }
    public int LastIndex { get; set; }
    public KeyInfo KeyInfo { get; set; }

    /// <summary>Creates a segment covering firstIndex..lastIndex with an empty KeyInfo.</summary>
    public SubPoly(int firstIndex, int lastIndex)
    {
        FirstIndex = firstIndex;
        LastIndex = lastIndex;
        KeyInfo = new KeyInfo();
    }
}
Lorenzo
|
|
|
|
|
|
I have an issue regarding tolerance: I compute the tolerance from tileX, tileY and the zoom level. It works well up to zoom level 15, but beyond that this error appears:
"An unhandled exception of type 'System.StackOverflowException' occurred in RDP_ GUI.exe".
|
|
|
|
|
|
I want to use this algorithm to reduce data points on a plot where data is fetched from database to plot. Can someone give an efficient way to do this?
My table has records in millions, and I would like to fetch only a few thousand to plot the chart.
|
|
|
|
|
I have not found the check if point is projected to line which contains segment, but actual projection lies behind segment.
Do you perform such check?
In such case tolerance should be calculated as a distance between point and one of the ends of segment, and should _not_ be considered as a length of the projection to the line.
Without such check wrong behaviour could appear if point is close to the line but far behind segment at the same time.
In such case point could be removed by algorithms, but in fact it should be preserved.
|
|
|
|
|
Can you tell me a sensible default tolerance value for Google Maps data, and explain what the X and Y of a point mean? Are they the latitude and longitude values?
|
|
|
|
|
In .NET there's already System.Drawing.PointF, which I think is precise enough for your demo.
|
|
|
|
|
Great work you have here!
I was deeply interested on it, so as much as that I would like to know if, perhaps, you would be willing to share your work under different licenses. For example, would you like to share your work under the LGPL? In this way I would be able to use it under other applications I have been working lately!
Thanks!
Diego
|
|
|
|
|
First of all: GREAT WORK!
1 bug:
In the public DouglasPeuckerReduction method, the loop should look like this:
while (Points[firstPoint].Equals(Points[lastPoint]))
{
lastPoint--;
pointIndexsToKeep.Add(lastPoint);
}
If you do not add the very last point again (and it is a significant point), you lose it.
+1 potential bug:
In the PerpendicaularDistance calculation, System.Drawing.Point is used, where X and Y are Int32. Should X and Y be a not that big number (e.g. 1 million), during calculation one might get easily a bit overflow. It is nicer if X and Y are treated as Int64.
modified 24-Apr-12 5:52am.
|
|
|
|
|
I think you are mistaken on both counts.
1) The loop was eliminating identical points. The fact that your modification adds them into the list of indexes to keep is working against the very purpose of reducing the number of points. Nothing is lost because the original value of lastIndex was already added to the list of indexes to keep, before this loop.
2) I don't see where System.Drawing.Point is being used at all. This code is using a custom class named Point whose X and Y are of type Double.
|
|
|
|
|
I really need it,thank you very much!
|
|
|
|
|
I implemented this in a pet silverlight project. It works well.
|
|
|
|
|
thank you very much for the douglas-peucker algorithm, i have been working on a project that uses that. i have a question, is there way to calculate an optimum epsilon value within the algorithm? or does it have to be user defined?
|
|
|
|
|
I have found some issues with your implementation of this code. here are the changes that i have made.
Double maxDistance = 0;
Int32 indexFarthest = 0;
if (lastPointIndex - firstPointIndex > 1)
{
for (Int32 index = firstPointIndex; index < lastPointIndex; index++)
{
Double distance = PerpendicularDistance(points[firstPointIndex], points[lastPointIndex], points[index]);
if (distance > maxDistance)
{
maxDistance = distance;
indexFarthest = index;
}
}
if (maxDistance > Tolerance && indexFarthest != firstPointIndex)
{
pointIndexsToKeep.Add(indexFarthest);
Reduction(points, firstPointIndex, indexFarthest, ref pointIndexsToKeep);
Reduction(points, indexFarthest, lastPointIndex, ref pointIndexsToKeep);
}
}
|
|
|
|
|
Maybe a good modification? Check this out:
tolerance in % of Maximum distance in plot
/// <summary>
/// Recursive Douglas-Peucker step where the tolerance is a PERCENTAGE of the
/// largest deviation seen so far in this recursion (recursionMaxDistance),
/// rather than an absolute distance.
/// </summary>
/// <param name="points">The full point list.</param>
/// <param name="firstPoint">Index of the chord's first point.</param>
/// <param name="lastPoint">Index of the chord's last point.</param>
/// <param name="tolerance">Tolerance in percent (0..100) of the running maximum deviation.</param>
/// <param name="recursionMaxDistance">Largest deviation seen so far; pass 0 at the top-level call.</param>
/// <param name="pointIndexsToKeep">Accumulates the indexes of kept points.</param>
private static void DouglasPeuckerReduction(List<PointD> points, Int32 firstPoint, Int32 lastPoint, Double tolerance, Double recursionMaxDistance, ref List<Int32> pointIndexsToKeep)
{
    // BUG FIX: removed the unused locals "indexError" and "distanceList" --
    // they were allocated (and distances collected) but never read.
    Double maxDistance = 0;
    Int32 indexFarthest = 0;
    // Find the point farthest from the chord firstPoint..lastPoint.
    for (Int32 index = firstPoint; index < lastPoint; index++)
    {
        Double distance = PerpendicularDistance(points[firstPoint], points[lastPoint], points[index]);
        if (distance > maxDistance)
        {
            maxDistance = distance;
            indexFarthest = index;
        }
    }
    // Carry the largest deviation seen so far down the recursion.
    if (maxDistance > recursionMaxDistance) recursionMaxDistance = maxDistance;
    // Keep the point when it deviates by more than tolerance percent of the
    // running maximum; indexFarthest == 0 means no interior point was found.
    if (maxDistance > (recursionMaxDistance * tolerance) / 100 && indexFarthest != 0)
    {
        // Add the point that exceeds the tolerance, then recurse both halves.
        pointIndexsToKeep.Add(indexFarthest);
        DouglasPeuckerReduction(points, firstPoint, indexFarthest, tolerance, recursionMaxDistance, ref pointIndexsToKeep);
        DouglasPeuckerReduction(points, indexFarthest, lastPoint, tolerance, recursionMaxDistance, ref pointIndexsToKeep);
    }
}
/// <summary>
/// Extension-method entry point for the percentage-tolerance reduction.
/// Tolerance is given in percent of the maximum deviation in the plot.
/// </summary>
/// <param name="Points">Polyline to reduce.</param>
/// <param name="Tolerance">Tolerance in percent (0..100) of the maximum deviation.</param>
/// <returns>The reduced polyline, or the input when it has fewer than 3 points.</returns>
public static List<PointD> DouglasPeuckerReduction(this List<PointD> Points, Double Tolerance)
{
    if (Points == null || Points.Count < 3)
        return Points;
    Int32 firstPoint = 0;
    Int32 lastPoint = Points.Count - 1;
    List<Int32> pointIndexsToKeep = new List<Int32>();
    // Add the first and last index to the keepers.
    pointIndexsToKeep.Add(firstPoint);
    pointIndexsToKeep.Add(lastPoint);
    // The first and the last point can not be the same.
    // BUG FIX: guard lastPoint >= 0 so a polyline whose points are all
    // coincident cannot drive the index out of range.
    while (lastPoint >= 0 && Points[firstPoint].Equals(Points[lastPoint]))
    {
        lastPoint--;
    }
    // Start the approximation with a recursionMaxDistance of 0.
    DouglasPeuckerReduction(Points, firstPoint, lastPoint, Tolerance, 0, ref pointIndexsToKeep);
    List<PointD> returnPoints = new List<PointD>();
    pointIndexsToKeep.Sort();
    foreach (Int32 index in pointIndexsToKeep)
    {
        returnPoints.Add(Points[index]);
    }
    return returnPoints;
}
modified on Thursday, March 4, 2010 12:32 PM
|
|
|
|
|
I have used this and it does a nice job of reducing a series of lat/longs for display on maps.
A couple of points for people looking to use the code:
I spent ages trying to hunt down a stack over flow error, before realising that the algo was crashing when passed a tolerance of 0. I'd suggest a simple sanity check of the arguments that get passed is a useful addition.
If you want to reduce the number of points to a useful number for display on Google Maps or Bing Maps (more than 2000 points in a polyline is slow!), it's quite useful to add a little bit of code to work out a tolerance value that gets the right reduction. You could do that either by looking at the point count and picking a relevant tolerance, or by iteratively running the algorithm with progressively higher tolerances until the number of points returned is small enough.
|
|
|
|
|
This is awesome! I need to simplify some polygons and your work is a great help.
|
|
|
|
|