import fiona
from fiona.crs import from_epsg
from shapely.geometry import shape
from coord_convert.transform import gcj2bd
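# Read the joined grid shapefile, assign each cell a sequence number within its
# adcode, convert the corner and center coordinates from GCJ-02 to BD-09 with
# coord_convert, and write the result to a new shapefile with string fields.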
shp_dir = "/Downloads/grid"
grid_input = "grid_join"
grid_output = "grid_code"
max_count = 10000000
# max_count = 10000
coord_precision = 8
crs = from_epsg(4326)
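# Running count of grid cells per adcode; the count becomes the zero-padded
# sequence suffix of each grid code.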
adcode_dict = {}
def run():
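    """Read the input grid shapefile, build the per-cell properties, and write the coded grid shapefile."""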
collection = fiona.open("{}/{}.shp".format(shp_dir, grid_input), encoding='utf-8')
schema = collection.schema
print(schema)
print(len(collection))
polygons = []
count = 0
features = iter(collection)
feature = next(features)
while count < max_count and feature is not None:
print(count)
        geom = shape(feature['geometry'])
properties = feature['properties']
adcode = properties['adcode']
        number = adcode_dict.get(adcode, 0) + 1
        adcode_dict[adcode] = number
centroid = geom.centroid
coordinates = feature['geometry']['coordinates'][0]
center = get_coord_str([round(centroid.x, coord_precision), round(centroid.y, coord_precision)])
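        # The exterior ring is assumed to start at the top-left corner and run
        # clockwise (tl, tr, br, bl, closing point), so the first four vertices map
        # directly to the corner properties below.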
feature['properties'] = {
"tl": get_coord_str(coordinates[0]),
"tr": get_coord_str(coordinates[1]),
"br": get_coord_str(coordinates[2]),
"bl": get_coord_str(coordinates[3]),
"center": center,
"province": properties['province'],
"city": properties['city'],
"county": properties['county'],
"adcode": adcode,
"code": "{}{}".format(adcode, '{:0>6}'.format(number))
}
polygons.append(feature)
count += 1
try:
feature = next(features)
except StopIteration:
break
schema = {
'geometry': 'Polygon',
'properties': {
'tl': 'str',
'tr': 'str',
'br': 'str',
'bl': 'str',
'center': 'str',
'province': 'str',
'city': 'str',
'county': 'str',
'adcode': 'str',
'code': 'str'
},
}
with fiona.open('{}/{}.shp'.format(shp_dir, grid_output), 'w', 'ESRI Shapefile', schema, crs=crs, encoding='utf-8') as c:
for poly in polygons:
c.write(poly)
def get_coord_str(coord):
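    """Convert a GCJ-02 (lng, lat) pair to a rounded BD-09 "lng,lat" string."""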
lng = round(float(coord[0]), coord_precision)
lat = round(float(coord[1]), coord_precision)
bd_lng, bd_lat = gcj2bd(lng, lat)
return "{},{}".format(round(bd_lng, coord_precision), round(bd_lat, coord_precision))
if __name__ == '__main__':
import timeit
start = timeit.default_timer()
run()
end = timeit.default_timer()
print("执行时间:{}".format(str(end - start)))
from app.models import Dataset
from app import db
import fiona
import os
import json
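# Driven by import_shp.json: open each configured shapefile with fiona, optionally
# remap its properties through the "fields" mapping, then create a Dataset record
# and import the features through the Flask app's db session.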
MAX_COUNT = 10000000
project_dir = os.path.dirname(os.path.abspath(__file__))
def run():
json_file_path = 'import_shp.json'
json_file_path = os.path.join(project_dir, json_file_path)
with open(json_file_path, encoding='utf-8') as json_file:
grid_config = json.load(json_file)
        shp_dir = grid_config.get('shp_dir', '')
        if not shp_dir:
            print("ERROR: shp_dir is not set")
            return
files = grid_config.get('files', [])
for file in files:
import_file(file, shp_dir)
def import_file(file_opts, shp_dir):
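    """Load the shapefile described by file_opts from shp_dir and import its features into a new Dataset."""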
file_name = file_opts.get('name', None)
if file_name is None:
print("ERROR: file_name is None")
return
    # feature_type from the config is informational only; it is replaced below by the
    # geometry type read from the shapefile schema.
    feature_type = file_opts.get('feature_type', '')
print("START import {}".format(file_name))
title = file_opts.get('title', file_name)
description = file_opts.get('description', '')
coordinate = file_opts.get('coordinate', 'gcj')
source = file_opts.get('source', '')
max_count = file_opts.get('max_features', MAX_COUNT)
fields = file_opts.get('fields', None)
_features = []
collection = fiona.open("{}/{}.shp".format(shp_dir, file_name), encoding='utf-8')
schema = collection.schema
feature_type = schema["geometry"].upper()
print(schema)
total = len(collection)
print("features total is {}".format(total))
index = 1
features = iter(collection)
feature = next(features)
max_count = min(max_count, total)
while index <= max_count and feature is not None:
if fields is not None:
properties = feature['properties']
new_properties = {}
for field_label in fields:
field_name = fields.get(field_label)
new_properties[field_label] = properties.get(field_name, '')
feature['properties'] = new_properties
print("{} / {}".format(index, total))
_features.append(feature)
index += 1
try:
feature = next(features)
except StopIteration:
break
feature_count = len(_features)
_dataset = Dataset(title=title, description=description, feature_type=feature_type,
feature_count=feature_count, coordinate=coordinate, source=source)
db.session.add(_dataset)
db.session.commit()
print(_dataset)
_dataset.import_features(_features)
print("COMPLETED import {}".format(file_name))
if __name__ == '__main__':
import timeit
start = timeit.default_timer()
run()
end = timeit.default_timer()
print("执行时间:{}".format(str(end - start)))
{
"shp_dir": "/Downloads/grid",
"files": [
{
"name": "grid_code",
"title": "网格数据",
"description": "全国网格数据",
"coordinate": "wgs84",
"source": "test",
"max_features": 10000000,
"fields": {
"编码": "code",
"省": "province",
"市": "city",
"区县": "county",
"行政区划编码": "adcode",
"左上角": "tl",
"右上角": "tr",
"右下角": "br",
"左下角": "bl",
"中心点": "center"
}
}
]
}
import fiona
from fiona.crs import from_epsg
import math
import os
import json
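# Driven by create_grid.json: cover the configured bounding box with rectangular
# cells whose edges span roughly `distance` metres on a sphere of radius R, and
# save the cells as polygons in a new shapefile.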
shp_dir = ""
output_file = ""
min_lng = None
min_lat = None
max_lng = None
max_lat = None
distance = None
R = 6378137
project_dir = os.path.dirname(os.path.abspath(__file__))
def save_file(polygons):
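    """Write the generated grid polygons to the output shapefile in EPSG:4326."""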
schema = {
'geometry': 'Polygon',
'properties': {
# "index": "int:10"
},
}
crs = from_epsg(4326)
with fiona.open('{}/{}.shp'.format(shp_dir, output_file), 'w', 'ESRI Shapefile', schema, crs=crs,
encoding='utf-8') as c:
for poly in polygons:
c.write(poly)
def run():
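    """Validate the configuration, build the grid cell polygons, and save them to a shapefile."""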
    if min_lng is None:
        print("ERROR: min_lng is None")
        return
    if min_lat is None:
        print("ERROR: min_lat is None")
        return
    if max_lng is None:
        print("ERROR: max_lng is None")
        return
    if max_lat is None:
        print("ERROR: max_lat is None")
        return
    if min_lng > max_lng:
        print("ERROR: min_lng must be less than max_lng")
        return
    if min_lat > max_lat:
        print("ERROR: min_lat must be less than max_lat")
        return
    if distance is None:
        print("ERROR: distance is None")
        return
index = 0
polygons = []
lng = min_lng
lat = min_lat
lng_list = get_row_lng_list(lng, lat, distance)
print("lng_list: {}".format(len(lng_list)))
lat_list = get_col_lat_list(lng, lat, distance)
print("lat_list: {}".format(len(lat_list)))
for i, lng in enumerate(lng_list):
if i + 1 >= len(lng_list):
break
for j, lat in enumerate(lat_list):
if j + 1 >= len(lat_list):
break
bl_lng = lng_list[i]
bl_lat = lat_list[j]
tr_lng = lng_list[i + 1]
tr_lat = lat_list[j + 1]
geometry = {
"type": "Polygon",
"coordinates": [[
(bl_lng, bl_lat),
(bl_lng, tr_lat),
(tr_lng, tr_lat),
(tr_lng, bl_lat),
(bl_lng, bl_lat)
]]
}
feature = {
"type": "Feature",
"properties": {
# "index": index
},
"geometry": geometry
}
polygons.append(feature)
index += 1
if index % 100 == 0:
print("index: {}".format(index))
print("total polygons: {}".format(str(index - 1)))
save_file(polygons)
def get_col_lat_list(lng, lat, d):
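    """Latitudes of the grid rows: step roughly d metres north from lat until max_lat is passed."""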
lat_list = [lat]
while lat < max_lat:
lng_2, lat_2 = get_lng_lat(lng, lat, d)
lat_list.append(lat_2)
lat = lat_2
return lat_list
def get_row_lng_list(lng, lat, d):
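    """Longitudes of the grid columns: step roughly d metres east (at latitude lat) from lng until max_lng is passed."""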
lng_list = [lng]
while lng < max_lng:
lng_2, lat_2 = get_lng_lat(lng, lat, d)
lng_list.append(lng_2)
lng = lng_2
return lng_list
def get_lng_lat(lng, lat, d):
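    """Return (lng, lat) after stepping d metres east (for the longitude) and d metres north (for the latitude) on a sphere of radius R."""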
lng_1 = math.radians(lng)
lat_1 = math.radians(lat)
    # Spherical law of cosines: cos(c) = sin(lat1)*sin(lat2) + cos(lat1)*cos(lat2)*cos(dlng).
    # Stepping due east by an arc of c = d / R at constant latitude gives
    # dlng = acos((cos(d / R) - sin(lat1)^2) / cos(lat1)^2).
    lng_2 = math.acos(
        (math.cos(d / R) - math.sin(lat_1) * math.sin(lat_1)) / (math.cos(lat_1) * math.cos(lat_1))) + lng_1
    lng_2 = math.degrees(lng_2)
    # Stepping due north along a meridian gives dlat = acos(cos(d / R)) = d / R.
    lat_2 = math.acos(math.cos(d / R)) + lat_1
    lat_2 = math.degrees(lat_2)
return lng_2, lat_2
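# Rough sanity check (small-step approximation, not part of the original script):
# an arc of 1000 m on a sphere of radius 6378137 m subtends about 0.008983 degrees,
# so get_lng_lat(0, 0, 1000) should step roughly 0.008983 deg east and north, and
# at 45 deg latitude the eastward step grows to roughly 0.0127 deg.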
if __name__ == '__main__':
import timeit
json_file_path = 'create_grid.json'
json_file_path = os.path.join(project_dir, json_file_path)
with open(json_file_path, encoding='utf-8') as json_file:
grid_config = json.load(json_file)
shp_dir = grid_config.get('shp_dir')
output_file = grid_config.get('output_file')
min_lng = grid_config.get('min_lng')
max_lng = grid_config.get('max_lng')
min_lat = grid_config.get('min_lat')
max_lat = grid_config.get('max_lat')
distance = grid_config.get('distance')
if "R" in grid_config:
R = grid_config.get('R')
start = timeit.default_timer()
run()
end = timeit.default_timer()
print("执行时间:{}".format(str(end - start)))
{
"shp_dir": "/Downloads",
"output_file": "grid_wgs84.shp",
"min_lng": 0,
"max_lng": 180,
"min_lat": 0,
"max_lat": 90,
"distance": 1,
"R": 6378137
}