Skip to content

Commit

Permalink
Make stats export work
Browse files Browse the repository at this point in the history
Uses very ugly workarounds — do not use in production! #275
  • Loading branch information
nesnoj committed Oct 19, 2018
1 parent 112e79e commit e8d0862
Show file tree
Hide file tree
Showing 3 changed files with 110 additions and 8 deletions.
83 changes: 83 additions & 0 deletions ding0/examples/export_pkl_to_db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import os
from datetime import datetime
from sqlalchemy.orm import sessionmaker
import json

from ding0.tools import results
from ding0.io.export import export_network
from egoio.tools.db import connection
from ding0.io.db_export import create_ding0_sql_tables, export_all_dataframes_to_db


if __name__ == '__main__':
    # Example script: load previously pickled Ding0 network objects from disk,
    # merge them into a single network and export the result to database
    # tables via ding0.io.db_export.
    #
    # NOTE(review): base_path is machine-specific and the DB section 'local'
    # must exist in the egoio config -- adjust both before running.
    base_path = '/home/jonathan/RLI/rli-daten/Ding0/20180823154014/'
    schema = 'dingo'
    run_id = 'eGo_Abschluss_WS_v1'

    # IDs of the MV grid districts whose pickle files should be exported.
    # (Earlier experimental ID lists were removed; see repo history.)
    mv_grid_districts = [2079]

    filenames = ["ding0_grids__{ext}.pkl".format(ext=_)
                 for _ in mv_grid_districts]

    # Load Ding0 data from the pickle files; skip files that do not exist
    # (a district may have been excluded by Ding0 or simply never generated).
    nds = []
    for filename in filenames:
        try:
            nd_load = results.load_nd_from_pickle(
                filename=os.path.join(base_path, filename))
            nds.append(nd_load)
        except FileNotFoundError:
            # Narrowed from a bare `except:` -- the message only makes sense
            # for a missing file; any other error should surface, not be
            # silently reported as "not found".
            print("File {mvgd} not found. It was maybe excluded by Ding0 or "
                  "just forgotten to generate by you...".format(mvgd=filename))

    # Guard against an empty result set -- the original code crashed with an
    # IndexError on nds[0] when no pickle could be loaded.
    if not nds:
        raise SystemExit("No pickle files could be loaded from {path}; "
                         "nothing to export.".format(path=base_path))

    # Merge all loaded networks into the first one so a single network object
    # can be handed to the exporter.
    nd = nds[0]
    for n in nds[1:]:
        nd.add_mv_grid_district(n._mv_grid_districts[0])

    # #########SQLAlchemy and DB table################
    db_engine = connection(section='local')
    session = sessionmaker(bind=db_engine)()

    # Create the Ding0 target tables (tested with reiners_db and oedb) ...
    create_ding0_sql_tables(db_engine, ding0_schema=schema)
    # drop_ding0_db_tables(oedb_engine, SCHEMA)
    # db_tables_change_owner(oedb_engine, SCHEMA)

    # ... and export all network dataframes into them.
    # parameter: export_network_to_db(engine, schema, df, tabletype, srid=None)
    export_all_dataframes_to_db(db_engine, schema, nw=nd, my_run_id=run_id)
27 changes: 21 additions & 6 deletions ding0/io/db_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
METADATA = DECLARATIVE_BASE.metadata

# Set the Database schema which you want to add the tables to
SCHEMA = "model_draft"
SCHEMA = "dingo"

# Metadata folder Path
METADATA_STRING_FOLDER = os.path.join(ding0.__path__[0], 'io', 'metadatastrings')
Expand Down Expand Up @@ -278,7 +278,7 @@ def create_ding0_sql_tables(engine, ding0_schema=SCHEMA):
Column('name', String(100)),
Column('geom', Geometry('POINT', 4326)),
Column('type', String(22)),
Column('subtype', String(22)),
Column('subtype', String(30)),
Column('v_level', Integer),
Column('nominal_capacity', Float(10)),
Column('weather_cell_id', BigInteger),
Expand Down Expand Up @@ -601,7 +601,7 @@ def change_owner(engine, table, role, schema):
engine.close()


def export_all_dataframes_to_db(engine, schema):
def export_all_dataframes_to_db(engine, schema, nw, my_run_id):
"""
exports all data frames from func. export_network() to the db tables
Expand All @@ -612,14 +612,29 @@ def export_all_dataframes_to_db(engine, schema):
:param schema:
"""

if engine.dialect.has_table(engine, DING0_TABLES["versioning"]):
# srid
# ToDo: Check why converted to int and string
global SRID
SRID = str(int(nw.config['geo']['srid']))

# return values from export_network() as tupels
run_id, nw_metadata, \
lv_grid, lv_gen, lv_cd, lv_stations, mvlv_trafos, lv_loads, \
mv_grid, mv_gen, mv_cb, mv_cd, mv_stations, hvmv_trafos, mv_loads, \
lines, mvlv_mapping = export_network(nw)

run_id = my_run_id

metadata_json = json.loads(nw_metadata)

if engine.dialect.has_table(engine, DING0_TABLES["versioning"], schema=schema):

db_versioning = pd.read_sql_table(DING0_TABLES['versioning'], engine, schema,
columns=['run_id', 'description'])
# Use for another run with different run_id
# if metadata_json['run_id'] not in db_versioning['run_id']:
if metadata_json['run_id'] not in db_versioning['run_id']:
# Use if just one run_id should be present to the DB table
if db_versioning.empty:
#if db_versioning.empty:
# this leads to wrong run_id if run_id is SET in __main__ -> 'run_id': metadata_json['run_id']
metadata_df = pd.DataFrame({'run_id': metadata_json['run_id'],
'description': str(metadata_json)}, index=[0])
Expand Down
8 changes: 6 additions & 2 deletions ding0/io/export.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from ding0.core import LVLoadAreaCentreDing0

from geoalchemy2.shape import from_shape, to_shape
from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping
from shapely.geometry import Point, MultiPoint, MultiLineString, LineString, MultiPolygon, shape, mapping, Polygon


def export_network(nw, mode='', run_id=None):
Expand Down Expand Up @@ -148,6 +148,7 @@ def aggregate_loads(la_center, aggr):

return aggr

idx = 0
for mv_district in nw.mv_grid_districts():
from shapely.wkt import dumps as wkt_dumps
mv_grid_id = mv_district.mv_grid.id_db
Expand Down Expand Up @@ -179,7 +180,7 @@ def aggregate_loads(la_center, aggr):
mv_station_name = '_'.join(
['MVStationDing0', 'MV', str(mv_station.id_db),
str(mv_station.id_db)])
mv_stations_dict[0] = {
mv_stations_dict[idx] = {
'id': mv_district.mv_grid.id_db,
'name': mv_station_name,
'geom': wkt_dumps(mv_station.geo_data),
Expand Down Expand Up @@ -437,6 +438,8 @@ def aggregate_loads(la_center, aggr):
str(lv_district.lv_grid.id_db),
str(lv_district.lv_grid.id_db)])

if isinstance(lv_district.geo_data, Polygon):
lv_district.geo_data = MultiPolygon([lv_district.geo_data])
lvgrid_idx += 1
lv_grid_dict[lvgrid_idx] = {
'id': lv_district.lv_grid.id_db,
Expand Down Expand Up @@ -581,6 +584,7 @@ def aggregate_loads(la_center, aggr):
'run_id': run_id,
'geom': None
}
idx += 1

lv_grid = pd.DataFrame.from_dict(lv_grid_dict, orient='index')
lv_gen = pd.DataFrame.from_dict(lv_gen_dict, orient='index')
Expand Down

0 comments on commit e8d0862

Please sign in to comment.