Commit 17f85f7f authored by Danijel Schorlemmer

Output of building polygons and full exposure data implemented

parent 8f14b17f
Pipeline #19129 failed with stage in 1 minute and 5 seconds
@@ -59,8 +59,12 @@ def create_sara_database(database_filepath):
# Create LOCATIONS table
conn.execute('''CREATE TABLE LOCATIONS
(IDX INTEGER PRIMARY KEY AUTOINCREMENT,
NAME TEXT);''')
sql_statement = "SELECT AddGeometryColumn('LOCATIONS', 'geometry', 4326, 'POINT', 'XY');"
occupancy TEXT,
ID INTEGER,
NAME TEXT,
longitude REAL,
latitude REAL);''')
sql_statement = "SELECT AddGeometryColumn('LOCATIONS', 'geom', 4326, 'POINT', 'XY');"
conn.execute(sql_statement)
# sql_statement = "SELECT AddGeometryColumn('LOCATIONS', 'voronoi', 4326, 'POLYGON', 'XY');"
# conn.execute(sql_statement)
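For reference, a minimal runnable sketch of the new LOCATIONS layout, assuming a local SpatiaLite setup in which the mod_spatialite extension can be loaded into sqlite3 (the extension name and the InitSpatialMetaData() call are assumptions about the environment, not part of this commit):

import sqlite3

def create_locations_table(database_filepath):
    conn = sqlite3.connect(database_filepath)
    conn.enable_load_extension(True)
    conn.load_extension("mod_spatialite")
    # Create the spatial metadata tables (1 = run in a single transaction).
    conn.execute("SELECT InitSpatialMetaData(1);")
    conn.execute('''CREATE TABLE LOCATIONS
                    (IDX INTEGER PRIMARY KEY AUTOINCREMENT,
                     occupancy TEXT,
                     ID INTEGER,
                     NAME TEXT,
                     longitude REAL,
                     latitude REAL);''')
    # Register the POINT geometry column in SRID 4326 (WGS84).
    conn.execute("SELECT AddGeometryColumn('LOCATIONS', 'geom', 4326, 'POINT', 'XY');")
    conn.commit()
    return conn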
@@ -90,8 +94,9 @@ def create_sara_database(database_filepath):
return conn
def read_sara_exposure_into_database(conn, sara_exposure_filepath):
def read_sara_exposure_into_database(conn, sara_exposure_filepath, exposure_occupancy):
cur = conn.cursor()
with open(sara_exposure_filepath) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
next(csv_reader) # Skip first line
@@ -108,21 +113,29 @@ def read_sara_exposure_into_database(conn, sara_exposure_filepath):
# Create new location entry
location_count += 1
location = "GeomFromText('POINT(%f %f)', 4326)" % (float(row[0]), float(row[1]))
sql_statement = "INSERT INTO LOCATIONS (geometry, NAME) VALUES (%s, '%s')" % (location, row[3])
print("SQL statement: " + sql_statement)
conn.execute(sql_statement)
conn.commit()
print("Location created")
sql_statement = "INSERT INTO LOCATIONS (occupancy, geom, ID, NAME, longitude, latitude) VALUES ('%s', %s, %d, '%s', %f, %f)" % (exposure_occupancy, location, int(row[2]), row[3], float(row[0]), float(row[1]))
# print("SQL statement: " + sql_statement)
cur.execute(sql_statement)
# conn.commit()
print("Location %d created" % location_count)
sql_statement = 'INSERT INTO ASSETS (LOCATION, TAXONOMY, NUMBER, STRUCTURAL, NIGHT) VALUES (' \
+ str(location_count) + ', """' + row[5] + '""", ' + row[6] + ', ' + row[7] + ', ' + row[8] + ')'
# print("SQL statement: " + sql_statement)
cur.execute(sql_statement)
# conn.commit()
# print("Asset created")
asset_count += 1
last_longitude = row[0]
last_latitude = row[1]
else:
sql_statement = 'INSERT INTO ASSETS (LOCATION, TAXONOMY, NUMBER, STRUCTURAL, NIGHT) VALUES (' \
+ str(location_count) + ', """' + row[5] + '""", ' + row[6] + ', ' + row[7] + ', ' + row[8] + ')'
print("SQL statement: " + sql_statement)
conn.execute(sql_statement)
conn.commit()
print("Asset created")
# print("SQL statement: " + sql_statement)
cur.execute(sql_statement)
# conn.commit()
# print("Asset created")
asset_count += 1
conn.commit()
print("Number of assets: " + str(asset_count))
@@ -140,40 +153,40 @@ def add_voronoi_cells(conn):
cur.execute(sql)
print("Table Voronoi created")
cur.execute("""SELECT ST_AsText(ST_VoronojDiagram(ST_Collect(geometry))) FROM LOCATIONS""")
cur.execute("""SELECT ST_AsText(ST_VoronojDiagram(ST_Collect(geom))) FROM LOCATIONS""")
conn.commit()
diag = cur.fetchall()
multipolygon = diag[0][0]
print(multipolygon)
# print(multipolygon)
p1 = wkt.loads(multipolygon)
for polygon in p1: # same for multipolygon.geoms
print(polygon)
# print(polygon)
location = "GeomFromText('%s', 4326)" % polygon
sql_statement = "INSERT INTO Voronoi (geom) VALUES (%s)" % location
print("SQL statement: " + sql_statement)
conn.execute(sql_statement)
conn.commit()
# print("SQL statement: " + sql_statement)
cur.execute(sql_statement)
conn.commit()
# Loop over points in Locations
sql_statement = "SELECT idx, ST_AsText(geometry) FROM LOCATIONS;"
print(sql_statement)
sql_statement = "SELECT idx, ST_AsText(geom) FROM LOCATIONS;"
# print(sql_statement)
cur.execute(sql_statement)
rows = cur.fetchall()
for row in rows:
print(" ")
print("Point")
print(row)
# print(" ")
# print("Point")
# print(row)
location_index = row[0]
print("Location index: " + str(location_index))
print("Point coordinates: " + str(row[1]))
# print("Point coordinates: " + str(row[1]))
sql_statement = "SELECT ST_AsText(geom) as voro FROM voronoi where within(GeomFromText('%s', 4326), voronoi.geom);" % row[1]
print(sql_statement)
# print(sql_statement)
cur.execute(sql_statement)
polys = cur.fetchall()
print(polys[0][0])
# print(polys[0][0])
# Add Voronoi polygon to LOCATIONS table
sql_statement = "UPDATE Voronoi SET loc_id = %d WHERE geom = GeomFromText('%s', 4326);" % (location_index, polys[0][0])
print(sql_statement)
# print(sql_statement)
cur.execute(sql_statement)
conn.commit()
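The cell assignment above relies on SpatiaLite's Within(); a small shapely-based sketch of the same point-in-polygon matching can serve as a cross-check (shapely is already used here via wkt.loads):

from shapely import wkt

def match_points_to_cells(point_wkts, polygon_wkts):
    # Return {point_index: polygon_index} for every point that falls inside
    # one of the Voronoi polygons, mirroring the Within() queries above.
    polygons = [wkt.loads(p) for p in polygon_wkts]
    matches = {}
    for i, point_wkt in enumerate(point_wkts):
        point = wkt.loads(point_wkt)
        for j, polygon in enumerate(polygons):
            if point.within(polygon):
                matches[i] = j
                break
    return matches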
@@ -195,9 +208,11 @@ def add_buildings(conn, building_filepath):
sql = "SELECT AddGeometryColumn('Buildings', "
sql += "'centroid', 4326, 'POINT', 'XY')"
cur.execute(sql)
conn.commit()
print("Table Buildings created")
buildings = geopandas.read_file(building_filepath)
num_building = 0
for index, row in buildings.iterrows(): # Looping over all points
sql_statement = "SELECT loc_id, geom FROM voronoi where within(GeomFromText('%s', 4326), voronoi.geom);" % row.geometry.centroid
# print(sql_statement)
@@ -213,14 +228,14 @@ def add_buildings(conn, building_filepath):
polys = cur.fetchall()
location_index = polys[0][0]
print(location_index)
quadkey = Tile.for_latitude_longitude(longitude=float(row.geometry.centroid.x), latitude=float(row.geometry.centroid.y), zoom=18).quad_tree
building_geometry = "GeomFromText('%s', 4326)" % (row.geometry)
sql_statement = "INSERT INTO Buildings (loc_id, quadkey, geom, centroid) VALUES (%d, %s, %s)" % (location_index, quadkey, building_geometry, centroid)
print("SQL statement: " + sql_statement)
conn.execute(sql_statement)
conn.commit()
building_geometry = "GeomFromText('%s', 4326)" % row.geometry
centroid_geometry = "GeomFromText('%s', 4326)" % centroid
sql_statement = "INSERT INTO Buildings (loc_id, quadkey, geom, centroid) VALUES (%d, %s, %s, %s)" % (location_index, quadkey, building_geometry, centroid_geometry)
num_building += 1
print("Building %d added to cell %d " % (num_building, location_index))
cur.execute(sql_statement)
conn.commit()
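For one geopandas row, the quadkey lookup and the corrected four-column Buildings insert can be condensed as below; this is a sketch that assumes the Tile class used above comes from pygeotile and uses parameter binding instead of the committed string formatting:

from pygeotile.tile import Tile

def insert_building(cur, location_index, geometry):
    # Zoom-18 quadtree key of the tile containing the building centroid,
    # then the four-column Buildings insert with parameter binding.
    centroid = geometry.centroid
    quadkey = Tile.for_latitude_longitude(latitude=centroid.y,
                                          longitude=centroid.x,
                                          zoom=18).quad_tree
    cur.execute(
        "INSERT INTO Buildings (loc_id, quadkey, geom, centroid) "
        "VALUES (?, ?, GeomFromText(?, 4326), GeomFromText(?, 4326))",
        (location_index, quadkey, geometry.wkt, centroid.wkt))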
def classify_buildings(conn):
@@ -229,6 +244,7 @@ def classify_buildings(conn):
cur = conn.cursor()
# Create the Buildings table
sql = 'CREATE TABLE Exposure ('
sql += 'idx INTEGER PRIMARY KEY AUTOINCREMENT,'
sql += 'building_id INTEGER, '
sql += 'taxonomy TEXT, '
sql += 'NUMBER REAL, '
@@ -248,40 +264,40 @@ def classify_buildings(conn):
sql_statement = "SELECT Taxonomy, Number, Structural, Night FROM Assets WHERE Location = %d" % row[0]
cur.execute(sql_statement)
assets = cur.fetchall()
for asset in assets:
print("Asset: %s, %f, %f, %f" % (asset[0], asset[1], asset[2], asset[3]))
# for asset in assets:
# print("Asset: %s, %f, %f, %f" % (asset[0], asset[1], asset[2], asset[3]))
taxonomies = numpy.array(assets)[:, 0]
print(taxonomies)
# print(taxonomies)
a = numpy.asarray(assets)[:, 1:].astype(numpy.float)
print(a)
# print(a)
sum = a.sum(axis=0)
print(sum)
# print(sum)
proportions = a/sum[None, :]
print(proportions)
# print(proportions)
proportions[numpy.isnan(proportions)] = 0
print(proportions)
# print(proportions)
# Select all buildings in the Voronoi cell
sql_statement = "SELECT idx FROM Buildings WHERE loc_id = %d" % row[0]
print(sql_statement)
# print(sql_statement)
cur.execute(sql_statement)
buildings = cur.fetchall()
for building_index in buildings:
# Write exposure data for the building
print(building_index[0])
# print(building_index[0])
for counter in range(taxonomies.shape[0]):
print(taxonomies[counter])
print(proportions[counter,:])
# print(taxonomies[counter])
# print(proportions[counter,:])
sql_statement = "INSERT INTO Exposure (building_id, taxonomy, number, structural, night) VALUES (%d, %s, %f, %f, %f)" % (building_index[0], taxonomies[counter], proportions[counter,0], proportions[counter,0], proportions[counter,0])
sql_statement = "INSERT INTO Exposure (building_id, taxonomy, number, structural, night) VALUES (%d, %s, %f, %f, %f)" % (building_index[0], taxonomies[counter], proportions[counter,0], proportions[counter,1], proportions[counter,2])
cur.execute(sql_statement)
conn.commit()
conn.commit()
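The classification step spreads each Voronoi cell's asset totals over its buildings by normalizing the NUMBER, STRUCTURAL and NIGHT columns; a compact numpy sketch of that normalization, including the NaN guard used above:

import numpy

def asset_proportions(assets):
    # assets: list of (taxonomy, number, structural, night) tuples for one cell.
    # Each numeric column is divided by its column sum; a zero column sum
    # yields NaN entries, which are reset to 0 as in the code above.
    taxonomies = numpy.asarray(assets)[:, 0]
    values = numpy.asarray(assets)[:, 1:].astype(float)
    proportions = values / values.sum(axis=0)[None, :]
    proportions[numpy.isnan(proportions)] = 0
    return taxonomies, proportions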
def output_tiles(conn, tile_filepath):
with open(tile_filepath, mode='w') as tiles_file:
fieldnames = ['origin_id', 'geometry']
writer = csv.DictWriter(tiles_file, fieldnames=fieldnames)
writer = csv.DictWriter(tiles_file, fieldnames=fieldnames, delimiter=';')
writer.writeheader()
cur = conn.cursor()
@@ -292,26 +308,68 @@ def output_tiles(conn, tile_filepath):
for quadkey in quadkeys:
print(quadkey[0])
tile = Tile.from_quad_tree(quadkey[0])
print(tile)
print(tile.bounds[0].latitude)
# print(tile)
# print(tile.bounds[0].latitude)
tile_polygon = box(tile.bounds[0].longitude, tile.bounds[0].latitude, tile.bounds[1].longitude, tile.bounds[1].latitude)
writer.writerow({'origin_id': 'Cell_ ' + quadkey[0], 'geometry': tile_polygon.wkt})
writer.writerow({'origin_id': 'cell_' + quadkey[0], 'geometry': tile_polygon.wkt})
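Each quadkey written above is expanded back into its tile's bounding box; the conversion in isolation, as a sketch assuming the Tile class comes from pygeotile and box from shapely.geometry, matching the calls above:

from pygeotile.tile import Tile
from shapely.geometry import box

def quadkey_to_wkt(quadkey):
    # bounds gives the lower-left and upper-right corner points of the tile;
    # box() expects (minx, miny, maxx, maxy), i.e. longitude before latitude.
    tile = Tile.from_quad_tree(quadkey)
    lower_left, upper_right = tile.bounds
    return box(lower_left.longitude, lower_left.latitude,
               upper_right.longitude, upper_right.latitude).wkt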
def output_building_polygons(conn, polygon_filepath):
def output_exposure(output_filepath):
with open(polygon_filepath, mode='w') as polygons_file:
fieldnames = ['OSM_ID', 'geometry', 'cell_ID']
writer = csv.DictWriter(polygons_file, fieldnames=fieldnames, delimiter=';')
writer.writeheader()
cur = conn.cursor()
sql_statement = "SELECT idx, quadkey, ST_AsText(geom) FROM Buildings"
cur.execute(sql_statement)
buildings = cur.fetchall()
for building in buildings:
print(building)
writer.writerow({'OSM_ID': 'OSM_%d' % building[0], 'geometry': building[2], 'cell_ID': 'cell_%s' % building[1]})
def output_exposure(conn, exposure_filepath):
with open(exposure_filepath, mode='w') as exposure_file:
fieldnames = ['id', 'lon', 'lat', 'taxonomy', 'number', 'structural', 'night', 'occupancy', 'admin_name', 'admin_ID', 'origin_id']
writer = csv.DictWriter(exposure_file, fieldnames=fieldnames, delimiter=',')
writer.writeheader()
cur = conn.cursor()
sql_statement = "SELECT Exposure.idx, X(Buildings.centroid), Y(Buildings.centroid), Exposure.taxonomy, "
sql_statement += "Exposure.number, Exposure.structural, Exposure.night, Locations.occupancy, Locations.name, "
sql_statement += "Locations.ID, Buildings.idx FROM Exposure "
sql_statement += "INNER JOIN Buildings ON Exposure.building_id = Buildings.idx "
sql_statement += "INNER JOIN Locations ON Buildings.loc_id = Locations.idx"
cur.execute(sql_statement)
assets = cur.fetchall()
for asset in assets:
print(asset)
writer.writerow({'id': 'GDE_%s_%d' % (asset[7], asset[0]),
'lon': asset[1],
'lat': asset[2],
'taxonomy': asset[3],
'number': asset[4],
'structural': asset[5],
'night': asset[6],
'occupancy': asset[7],
'admin_name': asset[8],
'admin_ID': asset[9],
'origin_id': 'OSM_%d' % asset[10]})
return 0
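The export joins Exposure, Buildings and Locations and writes one CSV row per exposure entry; for readability, the same query and row mapping restated as a self-contained sketch (not a replacement for the committed function):

import csv

EXPOSURE_QUERY = """
    SELECT Exposure.idx, X(Buildings.centroid), Y(Buildings.centroid),
           Exposure.taxonomy, Exposure.number, Exposure.structural,
           Exposure.night, Locations.occupancy, Locations.name,
           Locations.ID, Buildings.idx
    FROM Exposure
    INNER JOIN Buildings ON Exposure.building_id = Buildings.idx
    INNER JOIN Locations ON Buildings.loc_id = Locations.idx"""

def write_exposure_csv(conn, exposure_filepath):
    fieldnames = ['id', 'lon', 'lat', 'taxonomy', 'number', 'structural',
                  'night', 'occupancy', 'admin_name', 'admin_ID', 'origin_id']
    with open(exposure_filepath, mode='w') as exposure_file:
        writer = csv.DictWriter(exposure_file, fieldnames=fieldnames)
        writer.writeheader()
        for asset in conn.execute(EXPOSURE_QUERY):
            writer.writerow({'id': 'GDE_%s_%d' % (asset[7], asset[0]),
                             'lon': asset[1], 'lat': asset[2],
                             'taxonomy': asset[3], 'number': asset[4],
                             'structural': asset[5], 'night': asset[6],
                             'occupancy': asset[7], 'admin_name': asset[8],
                             'admin_ID': asset[9],
                             'origin_id': 'OSM_%d' % asset[10]})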
def main():
# conn = create_sara_database('../skr/testdb.sqlite')
conn = start_sara_database('../skr/testdb.sqlite')
# read_sara_exposure_into_database(conn, '../data/Exposure_Ind_Chile.csv')
# add_voronoi_cells(conn)
# add_buildings(conn, '../data/obm.building.small.gpkg')
# classify_buildings(conn)
conn = create_sara_database('../skr/testdb.sqlite')
# conn = start_sara_database('../skr/testdb.sqlite')
read_sara_exposure_into_database(conn, '../data/Exposure_Res_Chile.csv', 'Res')
add_voronoi_cells(conn)
add_buildings(conn, '../data/obm.building.gpkg')
classify_buildings(conn)
output_tiles(conn, '../skr/tiles.csv')
# output_exposure('../skr/exposure.csv')
output_building_polygons(conn, '../skr/polygons.csv')
output_exposure(conn, '../skr/exposure.csv')
main()