This article collects and summarizes typical usage examples of Python's shapely.geometry.mapping function. If you are wondering how the Python mapping function is used in practice, how to call it, or what real-world examples look like, the hand-picked code samples below may help.
The following presents 15 code examples of the mapping function, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
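Before the collected examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what mapping does: it converts a Shapely geometry into a GeoJSON-like dictionary via the geometry's __geo_interface__, and shapely.geometry.shape performs the inverse conversion. The coordinates are arbitrary illustration values.

from shapely.geometry import Point, Polygon, mapping, shape

# mapping() returns the GeoJSON-like dict for any Shapely geometry
pt = Point(1.0, 2.0)
print(mapping(pt))
# {'type': 'Point', 'coordinates': (1.0, 2.0)}

# The same dict form is what fiona expects in a feature's 'geometry' key,
# which is why most examples below pass mapping(...) straight to a write call.
poly = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
geojson_like = mapping(poly)
print(geojson_like['type'])  # 'Polygon'

# shape() is the inverse: it rebuilds a Shapely geometry from such a dict
assert shape(geojson_like).equals(poly)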
Example 1: delineate
def delineate(bfe_filename, bfe_elev_field, contour_filename, contour_elev_field, out_filename):
    # Import contours ------------------------------------
    print 'Importing contours... '
    now = dt.now()
    contours = ad.import_contours(contour_filename, contour_elev_field, chatty=True)
    crs = ad.get_crs(contour_filename)
    time_diff = dt.now() - now
    print 'Imported', len(contours), 'contours in', time_diff, 'seconds'

    # Import BFEs ----------------------------------------------
    bfes = ad.ez_bfe_import(bfe_filename, bfe_elev_field)
    num_bfes = len(bfes) - 1
    print 'Imported bfes from', bfes[0].elevation, 'to', bfes[-1].elevation

    # Delineate ---------------------------------------------
    now = dt.now()
    left_lines, right_lines = ad.delineate_by_bfes(bfes, contours)
    time_diff = dt.now() - now
    print num_bfes, 'bfe pairs completed in ', time_diff
    print time_diff/(num_bfes), 'seconds per bfe pair'

    # Export to shapefile
    schema = {'geometry': 'LineString', 'properties': {'status': 'str:25'}}
    with fiona.open(out_filename, 'w', driver='ESRI Shapefile', crs=crs, schema=schema) as out:
        for left in left_lines:
            out.write({'geometry': mapping(left.shapely_geo), 'properties': {'status': left.status}})
        for right in right_lines:
            out.write({'geometry': mapping(right.shapely_geo), 'properties': {'status': right.status}})
    print 'Finished exporting to shapefile'
Example 2: test_rasterize
def test_rasterize():
    geodict = GeoDict({'xmin':0.5,'xmax':3.5,
                       'ymin':0.5,'ymax':3.5,
                       'dx':1.0,'dy':1.0,
                       'ny':4,'nx':4})
    print('Testing rasterizeFromGeometry() burning in values from a polygon sequence...')
    #Define two simple polygons and assign them to shapes
    poly1 = [(0.25,3.75),(1.25,3.25),(1.25,2.25)]
    poly2 = [(2.25,3.75),(3.25,3.75),(3.75,2.75),(3.75,1.50),(3.25,0.75),(2.25,2.25)]
    shape1 = {'properties':{'value':5},'geometry':mapping(Polygon(poly1))}
    shape2 = {'properties':{'value':7},'geometry':mapping(Polygon(poly2))}
    shapes = [shape1,shape2]

    print('Testing burning in values where polygons need not contain pixel centers...')
    grid = Grid2D.rasterizeFromGeometry(shapes,geodict,fillValue=0,attribute='value',mustContainCenter=False)
    output = np.array([[5,5,7,7],
                       [5,5,7,7],
                       [0,0,7,7],
                       [0,0,0,7]])
    np.testing.assert_almost_equal(grid.getData(),output)
    print('Passed burning in values where polygons need not contain pixel centers.')

    print('Testing burning in values where polygons must contain pixel centers...')
    grid2 = Grid2D.rasterizeFromGeometry(shapes,geodict,fillValue=0,attribute='value',mustContainCenter=True)
    output = np.array([[5,0,7,0],
                       [0,0,7,7],
                       [0,0,0,7],
                       [0,0,0,0]])
    np.testing.assert_almost_equal(grid2.getData(),output)
    print('Passed burning in values where polygons must contain pixel centers.')
Example 3: make_shapefile
def make_shapefile(data, name):
    path = os.path.join('op_data', name + '.shp')
    crs = from_epsg('29902')
    if type(data) == dict:
        a_schema = {'geometry': 'Point',
                    'properties': {'name': 'str', 'address': 'str'}
                    }
        with fiona.open(path, "w",
                        driver='ESRI Shapefile',
                        crs=crs,
                        schema=a_schema) as output:
            for k, v in data.items():
                parts = k.split(',')
                name = parts[0]
                output.write({
                    'properties': {'name': name, 'address': k},
                    'geometry': geometry.mapping(v)})
    else:
        geom_type = data.geom_type
        a_schema = {'geometry': geom_type,
                    'properties': {'name': 'str'}
                    }
        with fiona.open(path, "w",
                        driver='ESRI Shapefile',
                        crs=crs,
                        schema=a_schema) as output:
            output.write({
                'properties': {'name': name},
                'geometry': geometry.mapping(data)})
Example 4: apply_shapely
def apply_shapely(self, method, args=None, call=True, out_geomtype=None,
                  **kwargs):
    coll = self.collection()
    out_schema = coll.schema.copy()
    if not args:
        args = []
    if out_geomtype:
        out_schema['geometry'] = out_geomtype

    tempds = self.tempds(method)
    with fiona.collection(tempds, "w", "ESRI Shapefile",
                          out_schema, crs=self.crs) as out_collection:
        for in_feature in coll:
            out_feature = in_feature.copy()
            if call:
                geom = mapping(
                    getattr(shape(in_feature['geometry']),
                            method)(*args, **kwargs)
                )
            else:
                # it's not a method, it's a property
                geom = mapping(
                    getattr(shape(in_feature['geometry']), method)
                )
            out_feature['geometry'] = geom
            out_collection.write(out_feature)
    return Layer(tempds)
Example 5: export_endpoints
def export_endpoints(shapefile, cross_sections, driver=None, epsg=None):
    # crs from epsg code
    if epsg is not None:
        crs = from_epsg(epsg)
    else:
        crs = None

    # schema
    schema = {'geometry': 'Point', 'properties': {'label': 'str'}}

    # shapefile write arguments
    shape_kwargs = {
        'driver': driver,
        'schema': schema,
        'crs': crs,
    }

    log.info('writing to {f:}'.format(f=os.path.basename(shapefile)))
    with fiona.open(shapefile, 'w', **shape_kwargs) as dst:
        for cs in cross_sections:
            startpoint = Point(cs.shape.coords[0])
            endpoint = Point(cs.shape.coords[-1])
            dst.write({
                'geometry': mapping(startpoint),
                'properties': {'label': cs.label}
            })
            dst.write({
                'geometry': mapping(endpoint),
                'properties': {'label': cs.label + '`'}
            })
Example 6: main
def main(infile, outfile, driver):
    with fio.open(infile) as src:
        meta = src.meta
        meta['driver'] = driver

    with fio.open(infile) as src, fio.open(outfile, 'w', **meta) as dst:
        with click.progressbar(src) as features:
            for feat in features:
                east = deepcopy(feat)
                west = deepcopy(feat)
                east_geom = shape(east['geometry'])
                west_geom = shape(west['geometry'])
                # if 'Point' not in asShape(feat['geometry']).type:
                #     east_geom = east_geom.simplify(0.0001).buffer(0)
                #     west_geom = west_geom.simplify(0.0001).buffer(0)
                east_geom = translate(east_geom, xoff=180)
                west_geom = translate(west_geom, xoff=-180)
                if not east_geom.is_empty:
                    east['geometry'] = mapping(east_geom)
                    dst.write(east)
                if not west_geom.is_empty:
                    west['geometry'] = mapping(west_geom)
                    dst.write(west)
Example 7: iterfeatures
def iterfeatures(self, na='null', show_bbox=False):
    """
    Returns an iterator that yields feature dictionaries that comply with
    __geo_interface__

    Parameters
    ----------
    na : {'null', 'drop', 'keep'}, default 'null'
        Indicates how to output missing (NaN) values in the GeoDataFrame
        * null: output the missing entries as JSON null
        * drop: remove the property from the feature. This applies to
                each feature individually so that features may have
                different properties
        * keep: output the missing entries as NaN
    show_bbox : include bbox (bounds) in the geojson. default False
    """
    if na not in ['null', 'drop', 'keep']:
        raise ValueError('Unknown na method {0}'.format(na))

    ids = np.array(self.index, copy=False)
    geometries = np.array(self[self._geometry_column_name], copy=False)

    properties_cols = self.columns.difference([self._geometry_column_name])

    if len(properties_cols) > 0:
        # convert to object to get python scalars.
        properties = self[properties_cols].astype(object).values
        if na == 'null':
            properties[pd.isnull(self[properties_cols]).values] = None

        for i, row in enumerate(properties):
            geom = geometries[i]

            if na == 'drop':
                properties_items = dict((k, v) for k, v
                                        in zip(properties_cols, row)
                                        if not pd.isnull(v))
            else:
                properties_items = dict((k, v) for k, v
                                        in zip(properties_cols, row))

            feature = {'id': str(ids[i]),
                       'type': 'Feature',
                       'properties': properties_items,
                       'geometry': mapping(geom) if geom else None}

            if show_bbox:
                feature['bbox'] = geom.bounds if geom else None
            yield feature

    else:
        for fid, geom in zip(ids, geometries):
            feature = {'id': str(fid),
                       'type': 'Feature',
                       'properties': {},
                       'geometry': mapping(geom) if geom else None}
            if show_bbox:
                feature['bbox'] = geom.bounds if geom else None
            yield feature
Example 8: create_t6_deserts
def create_t6_deserts(desert_geom, b_box, mask_metadata):
    """"""
    geom_list = list()
    with fiona.open(t6_block_groups) as block_groups:
        t6_metadata = block_groups.meta.copy()
        with fiona.open(t6_desert_feats, 'w', **t6_metadata) as t6_deserts:
            for bg in block_groups:
                geom = shape(bg['geometry'])
                props = bg['properties']

                # 'neither' is misspelled in dataset so (sic)
                if props['min_pov'] != 'niether' and \
                        geom.intersects(desert_geom):
                    geom_list.append(geom)
                    new_geom = geom.intersection(desert_geom)
                    bg['geometry'] = mapping(new_geom)
                    t6_deserts.write(bg)

    t6_geom = unary_union(geom_list)
    t6_desert_geom = t6_geom.intersection(desert_geom)
    t6_mask_geom = b_box.difference(t6_desert_geom)

    with fiona.open(t6_desert_mask, 'w', **mask_metadata) as t6_mask:
        feat = {
            'geometry': mapping(t6_mask_geom),
            'properties': {
                'id': 1
            }
        }
        t6_mask.write(feat)
Example 9: olson_transform
def olson_transform(geojson, scale_values):
    """
    Scale GeoJSON features.

    In-place scaling transformation of each polygon of the geojson provided
    according to the "scale values" also provided.

    Parameters
    ----------
    geojson: dict
        The geojson of polygons to transform
        (it might be useful to have chosen an appropriate projection as we
        want to deal with the area)
    scale_values: list
        The pre-computed scale values for olson transformation
        (1 = no transformation)

    Returns
    -------
    Nothing
    """
    if len(geojson["features"]) != len(scale_values):
        raise ValueError("Inconsistent number of features/values")
    for val, feature in zip(scale_values, geojson['features']):
        geom = shape(feature["geometry"])
        feature['properties']['ref_area'] = geom.area
        if hasattr(geom, '__len__'):
            feature["geometry"] = mapping(
                MultiPolygon([scale(g, xfact=val, yfact=val) for g in geom]))
        else:
            feature["geometry"] = mapping(scale(geom, xfact=val, yfact=val))
    geojson['features'].sort(
        key=lambda x: x['properties']['ref_area'], reverse=True)
Example 10: get_employers_near_stops
def get_employers_near_stops():
    """"""
    distance = 5280 / 2
    filter_field = 'LINE'
    filter_vals = ['O']

    stop_buffers = dict()
    stop_names = dict()
    with fiona.open(RAIL_STOP) as rail_stop:
        for fid, feat in rail_stop.items():
            fields = feat['properties']
            if fields[filter_field] in filter_vals:
                geom = shape(feat['geometry'])
                feat['geometry'] = mapping(geom.buffer(distance))
                stop_buffers[fid] = feat
                stop_names[fid] = fields['STATION']

    wb = load_workbook(GEOCODED)
    ws = wb.worksheets[0]
    header = [str(cell.value) for cell in ws.rows[0]]
    x_ix = header.index('X Coordinate')
    y_ix = header.index('Y Coordinate')

    employers = dict()
    for i, row in enumerate(ws.iter_rows(row_offset=1)):
        x, y = row[x_ix].value, row[y_ix].value
        if x and y:
            feat = dict()
            x, y = float(x), float(y)
            feat['geometry'] = mapping(Point(x, y))
            field_vals = [cell.value for cell in row]
            feat['properties'] = OrderedDict(zip(header, field_vals))
            employers[i] = feat

    join_mapping = spatial_join(stop_buffers, employers)

    with open(EMP_STATIONS, 'wb') as emp_stations:
        emp_writer = csv.writer(emp_stations)
        station_header = ['Stations'] + header
        emp_writer.writerow(station_header)

        for i, row in enumerate(ws.iter_rows(row_offset=1)):
            if i in join_mapping:
                station_ids = join_mapping[i]
                station_str = ', '.join([stop_names[sid] for sid in station_ids])
                csv_row = [station_str]
                for cell in row:
                    value = cell.value
                    if isinstance(value, unicode):
                        value = value.encode('utf-8')
                    csv_row.append(value)
                emp_writer.writerow(csv_row)
Example 11: __init__
def __init__(self, data_footprint: List[Tuple[float, float]]):
    """Construct data and tile footprints using the points from product metadata

    Points are assumed to be in ll, lr, ur, ul order
    """
    data_poly = normalize_footprint(data_footprint + [data_footprint[0]])
    tile_poly = MultiPolygon([x.envelope for x in data_poly])
    data_polygon = mapping(data_poly)
    tile_polygon = mapping(tile_poly)
    self.data_polygon = data_polygon
    self.tile_polygon = tile_polygon
Example 12: get_tile_geometry
def get_tile_geometry(path, origin_espg, tolerance=500):
    """ Calculate the data and tile geometry for sentinel-2 tiles """

    with rasterio.open(path) as src:

        # Get tile geometry
        b = src.bounds
        tile_shape = Polygon([(b[0], b[1]), (b[2], b[1]), (b[2], b[3]), (b[0], b[3]), (b[0], b[1])])
        tile_geojson = mapping(tile_shape)

        # read first band of the image
        image = src.read(1)

        # create a mask of zero values
        mask = image == 0.

        # generate shapes of the mask
        novalue_shape = shapes(image, mask=mask, transform=src.affine)

        # generate polygons using shapely
        novalue_shape = [Polygon(s['coordinates'][0]) for (s, v) in novalue_shape]

        if novalue_shape:
            # Make sure polygons are united
            # also simplify the resulting polygon
            union = cascaded_union(novalue_shape)

            # generates a geojson
            data_shape = tile_shape.difference(union)

            # If there are multipolygons, select the largest one
            if data_shape.geom_type == 'MultiPolygon':
                areas = {p.area: i for i, p in enumerate(data_shape)}
                largest = max(areas.keys())
                data_shape = data_shape[areas[largest]]

            # if the polygon has interior rings, remove them
            if list(data_shape.interiors):
                data_shape = Polygon(data_shape.exterior.coords)

            data_shape = data_shape.simplify(tolerance, preserve_topology=False)
            data_geojson = mapping(data_shape)
        else:
            data_geojson = tile_geojson

        # convert coordinates to degrees
        return (to_latlon(tile_geojson, origin_espg), to_latlon(data_geojson, origin_espg))
Example 13: get_ibound_from_huc
def get_ibound_from_huc(huc_data_shp=None, ibound_output_shp=None, ibound_huc_scale=None, extents_huc_list=None):
    '''Reduces the shapefile used for the ibound zonation scheme to only those
    polygons (i.e., HUCs) that are in the active model area. For higher resolution
    zonation (i.e., using smaller HUCs for the IBOUND zonation scheme) this is much
    faster than clipping the zonation shapefile before rasterizing.'''

    print '\nReducing the IBOUND zonation shapefile to the active model area.\n'
    zone_id_field = 'HUC' + str(ibound_huc_scale)

    with fiona.open(huc_data_shp, 'r') as vin:
        driver, crs, schema = vin.driver, vin.crs, vin.schema
        schema['properties'] = {zone_id_field: 'str'}

        with fiona.open(ibound_output_shp, 'w', driver=driver, crs=crs, schema=schema) as vout:
            for feature in vin:
                izone = int(feature['properties']['HUC' + str(ibound_huc_scale)])
                check_izone = str(izone).zfill(int(ibound_huc_scale))
                for ihuc in extents_huc_list:
                    if (check_izone.startswith(ihuc)):
                        igeometry = shape(feature['geometry'])
                        vout.write({'geometry': mapping(igeometry), 'properties': {zone_id_field: izone}})
    return
Example 14: feature
def feature(i, row):
    return {
        "id": str(i),
        "type": "Feature",
        "properties": dict((k, v) for k, v in iteritems(row) if k != "geometry"),
        "geometry": mapping(row["geometry"]),
    }
Example 15: get_extents_from_huc
def get_extents_from_huc(huc_data_shp=None, extents_output_shp=None, extents_huc_list=None):
    '''Extracts a user-specified HUC or list of HUCs from the national dataset and writes it
    to a shapefile. 'huc_data_shp' = shapefile that includes the huc polygons
    that will be extracted.'''

    extents_huc_scale = len(extents_huc_list[0])
    huc_field = 'HUC' + str(extents_huc_scale)

    with fiona.open(huc_data_shp) as vin:
        schema = vin.schema
        crs = vin.crs
        driver = vin.driver

    # Reduce the extract schema to only the huc id field
    schema['properties'] = {huc_field: 'str'}

    # Now write the model domain shapefile
    with fiona.open(huc_data_shp) as vect_in:
        polygon_list = []
        for feature in vect_in:
            if (feature['properties'][huc_field] in extents_huc_list):
                polygon_list.append(shape(feature['geometry']))
        merged = unary_union(polygon_list)

    with fiona.open(extents_output_shp, 'w', driver=driver, crs=crs, schema=schema) as extract_out:
        extract_out.write({'geometry': mapping(merged), 'properties': {huc_field: 'Merged'}})
    return