I want to use OGR in Python to write a simple point-in-polygon test:
import osgeo.ogr
import osgeo.osr

def point_in_polygon(point, polygon):
    """
    point : [longitude, latitude]
    polygon : [(lon1, lat1), (lon2, lat2), ..., (lonN, latN)]
    """
    # Create the spatial reference (WGS84 geographic coordinates)
    spatialReference = osgeo.osr.SpatialReference()
    spatialReference.SetWellKnownGeogCS("WGS84")
    # Create the ring and add the polygon vertices to it
    ring = osgeo.ogr.Geometry(osgeo.ogr.wkbLinearRing)
    for lon, lat in polygon:
        ring.AddPoint(lon, lat)
    # Create the polygon from the ring
    poly = osgeo.ogr.Geometry(osgeo.ogr.wkbPolygon)
    poly.AssignSpatialReference(spatialReference)
    poly.AddGeometry(ring)
    # Create the point to test
    pt = osgeo.ogr.Geometry(osgeo.ogr.wkbPoint)
    pt.AssignSpatialReference(spatialReference)
    pt.SetPoint(0, point[0], point[1])
    # Test containment
    return pt.Within(poly)
However, it doesn't seem to work correctly:
In [22]: polygon = [(-10., -10.), (-10., 10.), (10., 10.), (10., -10.), (-10., -10.)]
In [23]: point_in_polygon([0., 0.], polygon)
Out[23]: True
In [24]: point_in_polygon([359., 0.], polygon)
Out[24]: False
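For reference, this is the quick sanity check I run on the failing case to see what OGR actually builds (a minimal sketch reusing the same calls as in the function above; ExportToWkt is the standard GDAL method for dumping a geometry as WKT):

from osgeo import ogr, osr

ref = osr.SpatialReference()
ref.SetWellKnownGeogCS("WGS84")

# Same square polygon as in the transcript above
ring = ogr.Geometry(ogr.wkbLinearRing)
for lon, lat in [(-10., -10.), (-10., 10.), (10., 10.), (10., -10.), (-10., -10.)]:
    ring.AddPoint(lon, lat)
poly = ogr.Geometry(ogr.wkbPolygon)
poly.AssignSpatialReference(ref)
poly.AddGeometry(ring)

# The point at longitude 359 that I expected to fall inside
pt = ogr.Geometry(ogr.wkbPoint)
pt.AssignSpatialReference(ref)
pt.SetPoint(0, 359., 0.)

print(poly.ExportToWkt())  # the ring OGR actually stores
print(pt.ExportToWkt())    # the point being tested
print(pt.Within(poly))     # False, as in the session above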
Any idea what I'm missing?