Skip to content
Snippets Groups Projects
Commit 011a2aef authored by markn92's avatar markn92
Browse files

refactor

parent 164501f1
No related branches found
No related tags found
No related merge requests found
import logging
import random
from time import perf_counter
from evaluation.lib.T import *
from evaluation.lib.export import write_head, write_row
from evaluation.lib.queries import (
gasstation_query,
charge_query,
classic_query,
astar_query,
bidirectional_query,
CACHE
)
logger = logging.getLogger(__name__)
def insert_charging_stations(graph, number, charging_stations):
start = perf_counter()
graph.insert_charging_stations(charging_stations, number)
runtime = perf_counter() - start
logger.info('Importing {} Charging Stations took {:.2f} s'.format(
len(graph.charging_stations),
runtime
))
def query_benchmark(graphs, charging_stations, conf, result_dir):
    """Run every configured routing-query benchmark over each map/setup pair.

    :param graphs: iterable of graphs, aligned with ``conf['maps']``.
    :param charging_stations: charging-station data passed to
        :func:`insert_charging_stations` for each setup.
    :param conf: benchmark configuration with keys ``maps``, ``setups`` and
        ``queries_per_setup``; each setup may restrict ``algorithms``.
    :param result_dir: directory (``pathlib.Path``) receiving one CSV per algorithm.
    """
    query_conf = {
        'classic': Query(classic_query, 'classic.csv', ClassicQueryRow),
        'astar': Query(astar_query, 'astar.csv', AStarQueryRow),
        'bidirectional': Query(bidirectional_query, 'bidirectional.csv', QueryRow),
        'gasstation': Query(gasstation_query, 'gasstation.csv', GasstationQueryRow),
        'charge': Query(charge_query, 'charge.csv', ChargeQueryRow)
    }

    # Truncate any existing result files and write the CSV headers.
    for _, filename, row_class in query_conf.values():
        with result_dir.joinpath(filename).open('w') as f:
            write_head(f, row_class)

    for map_name, G in zip(conf['maps'], graphs):
        # Sample the start/target pairs once per map; every setup reuses them
        # so the setups stay comparable.
        nodes = random.sample(list(G.nodes), k=2 * conf['queries_per_setup'])
        half = len(nodes) // 2
        start_nodes = nodes[:half]
        target_nodes = nodes[half:]

        for setup in conf['setups']:
            # Randomly add the number of charging stations this setup asks for.
            insert_charging_stations(G, setup['charging_stations'], charging_stations)

            # Only run the algorithms requested for this setup (default: all).
            query_confs = [
                query_conf[key] for key in setup.get('algorithms', query_conf.keys())
            ]

            for func, filename, row_class in query_confs:
                logger.info('Running {} queries with {} on map {}'.format(
                    len(start_nodes),
                    func.__name__,
                    map_name
                ))
                # One open per algorithm instead of one per row; rows are still
                # appended in the same order, so the CSV output is unchanged.
                with result_dir.joinpath(filename).open('a') as f:
                    for i, (s, t) in enumerate(zip(start_nodes, target_nodes)):
                        logger.debug(f'{i + 1}/{len(start_nodes)}')
                        write_row(f, func(G, setup, s, t))

            # Drop cached (contracted/state) graphs: they depend on the charging
            # stations inserted for this setup and must not leak into the next.
            CACHE.clear()
import logging import logging
import gc
from time import perf_counter from time import perf_counter
import networkx as nx import networkx as nx
...@@ -48,6 +50,22 @@ def cached(key): ...@@ -48,6 +50,22 @@ def cached(key):
return decorator return decorator
def no_gc(func):
    """Decorator: run *func* with garbage collection disabled.

    The previous collector state is restored afterwards even if *func* raises.
    """
    from functools import wraps

    # wraps() keeps func.__name__ intact — without it the benchmark's
    # "Running ... with {func.__name__}" log line would always say 'inner'.
    @wraps(func)
    def inner(*args, **kwargs):
        was_enabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # Re-enable only if GC was on before, so nested no_gc calls
            # (or callers that run with GC off) are not disturbed.
            if was_enabled:
                gc.enable()
    return inner
@cached('gasstation') @cached('gasstation')
def get_contracted_graph(graph, conf, f): def get_contracted_graph(graph, conf, f):
start = perf_counter() start = perf_counter()
...@@ -80,6 +98,7 @@ def get_state_graph(contracted_graph, conf, f): ...@@ -80,6 +98,7 @@ def get_state_graph(contracted_graph, conf, f):
return state_graph, state_graph_time return state_graph, state_graph_time
@no_gc
def gasstation_query(graph, conf, s, t): def gasstation_query(graph, conf, s, t):
f = GasstationAccessFunctions(conf['consumption']['consumption_coefficient']) f = GasstationAccessFunctions(conf['consumption']['consumption_coefficient'])
contracted_graph, contraction_time = get_contracted_graph(graph, conf, f) contracted_graph, contraction_time = get_contracted_graph(graph, conf, f)
...@@ -118,6 +137,7 @@ def gasstation_query(graph, conf, s, t): ...@@ -118,6 +137,7 @@ def gasstation_query(graph, conf, s, t):
) )
@no_gc
def charge_query(graph, conf, s, t): def charge_query(graph, conf, s, t):
start = perf_counter() start = perf_counter()
if conf['consumption']['type'] == 'gasstation': if conf['consumption']['type'] == 'gasstation':
...@@ -149,6 +169,7 @@ def charge_query(graph, conf, s, t): ...@@ -149,6 +169,7 @@ def charge_query(graph, conf, s, t):
) )
@no_gc
def classic_query(graph, conf, s, t): def classic_query(graph, conf, s, t):
start = perf_counter() start = perf_counter()
try: try:
...@@ -169,6 +190,7 @@ def classic_query(graph, conf, s, t): ...@@ -169,6 +190,7 @@ def classic_query(graph, conf, s, t):
) )
@no_gc
def bidirectional_query(graph, conf, s, t): def bidirectional_query(graph, conf, s, t):
start = perf_counter() start = perf_counter()
try: try:
...@@ -189,6 +211,7 @@ def bidirectional_query(graph, conf, s, t): ...@@ -189,6 +211,7 @@ def bidirectional_query(graph, conf, s, t):
) )
@no_gc
def astar_query(graph, conf, s, t): def astar_query(graph, conf, s, t):
start = perf_counter() start = perf_counter()
try: try:
......
start_node,target_node,query_time,trip_time,nodes,edges,charging_stations
import argparse import argparse
import json
import gc
import random
import pickle import pickle
import json
import logging import logging
from time import perf_counter from time import perf_counter
from pathlib import Path from pathlib import Path
import yaml import yaml
from evrouting.osm.imports import read_osm from evrouting.osm.imports import read_osm
from evaluation.lib.T import * from evaluation.lib.benchmarks import query_benchmark
from evaluation.lib.export import write_head, write_row
from evaluation.lib.queries import (
gasstation_query,
charge_query,
classic_query,
astar_query,
bidirectional_query,
CACHE
)
base = Path(__file__).parent
def no_gc(func):
    """Decorator: run *func* with garbage collection disabled.

    Restores the prior collector state afterwards, even on exception.
    """
    from functools import wraps

    # Preserve func's metadata (__name__, __doc__) on the wrapper so logging
    # that inspects func.__name__ reports the real function, not 'inner'.
    @wraps(func)
    def inner(*args, **kwargs):
        gcold = gc.isenabled()
        gc.disable()
        try:
            r = func(*args, **kwargs)
        finally:
            if gcold:
                gc.enable()
        return r
    return inner
def query_benchmark(graphs, conf, result_dir):
# Charging Stations
cs_path = base.joinpath('static').joinpath(conf['charging_stations'])
with cs_path.open() as f:
charging_stations = json.load(f)
query_conf = {
'classic': Query(classic_query, 'classic.csv', ClassicQueryRow),
'astar': Query(astar_query, 'astar.csv', AStarQueryRow),
'bidirectional': Query(bidirectional_query, 'bidirectional.csv', QueryRow),
'gasstation': Query(query_function=gasstation_query,
filename='gasstation.csv',
row_dataclass=GasstationQueryRow),
'charge': Query(charge_query, 'charge.csv', ChargeQueryRow)
}
# Remove existing results
for _, filename, row_class in query_conf.values():
with result_dir.joinpath(filename).open('w') as f:
write_head(f, row_class)
for map_name, G in zip(conf['maps'], graphs):
nodes = random.sample(list(G.nodes), k=2 * conf['queries_per_setup'])
for setup in conf['setups']:
# Random start and target nodes
start_nodes = nodes[:int(len(nodes) / 2)]
target_nodes = nodes[int(len(nodes) / 2):]
# Random adding of charging stations
insert_charging_stations(G, setup['charging_stations'], charging_stations)
# Get algorithms for this setup
query_confs = [
query_conf[key] for key in setup.get('algorithms', query_conf.keys())
]
for func, filename, row_class in query_confs:
logging.info('Running {} queries with {} on map {}'.format(
len(start_nodes),
func.__name__,
map_name
))
for i, (s, t) in enumerate(zip(start_nodes, target_nodes)):
logging.debug(f'{i + 1}/{len(start_nodes)}')
# Run tests with garbage collection disabled
result_data = no_gc(func)(G, setup, s, t)
with result_dir.joinpath(filename).open('a') as f:
write_row(f, result_data)
# Delete cached graphs
for key in list(CACHE.keys()):
del CACHE[key]
def insert_charging_stations(graph, number, charging_stations):
start = perf_counter()
graph.insert_charging_stations(charging_stations, number)
runtime = perf_counter() - start
logging.info('Importing {} Charging Stations took {:.2f} s'.format(
len(graph.charging_stations),
runtime
))
def get_map(osm_path: Path, backup_dir=None): def get_map(osm_path: Path, backup_dir=None):
...@@ -131,7 +37,8 @@ def apply_conversions(conf): ...@@ -131,7 +37,8 @@ def apply_conversions(conf):
"""kWh to Wh""" """kWh to Wh"""
for setup in conf['setups']: for setup in conf['setups']:
setup['capacity'] = 1000 * setup['capacity'] setup['capacity'] = 1000 * setup['capacity']
setup['consumption']['consumption_coefficient'] = 1000 * setup['consumption']['consumption_coefficient'] setup['consumption']['consumption_coefficient'] = \
1000 * setup['consumption']['consumption_coefficient']
setup['mu_s'] = 1000 * setup['mu_s'] setup['mu_s'] = 1000 * setup['mu_s']
setup['mu_t'] = 1000 * setup['mu_t'] setup['mu_t'] = 1000 * setup['mu_t']
return conf return conf
...@@ -142,6 +49,7 @@ if __name__ == '__main__': ...@@ -142,6 +49,7 @@ if __name__ == '__main__':
format='%(asctime)s %(message)s', format='%(asctime)s %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p', datefmt='%m/%d/%Y %I:%M:%S %p',
level=logging.DEBUG) level=logging.DEBUG)
base = Path(__file__).parent
results_dir = base.joinpath('results') results_dir = base.joinpath('results')
static_dir = base.joinpath('static') static_dir = base.joinpath('static')
...@@ -172,9 +80,14 @@ if __name__ == '__main__': ...@@ -172,9 +80,14 @@ if __name__ == '__main__':
for m in conf['maps'] for m in conf['maps']
] ]
with static_dir.joinpath(conf['charging_stations']).open() as f:
charging_stations = json.load(f)
if conf['type'] == 'query': if conf['type'] == 'query':
query_dir = benchmark_dir.joinpath('queries') query_dir = benchmark_dir.joinpath('queries')
query_dir.mkdir(exist_ok=True) query_dir.mkdir(exist_ok=True)
query_benchmark(graphs=graphs, query_benchmark(graphs=graphs,
charging_stations=charging_stations,
conf=conf, conf=conf,
result_dir=query_dir) result_dir=query_dir
)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment