Cleaning Posta POI worker
parent a146b5b5f9
commit 4af630a0f8
.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
.idea
admiral-router/.editorconfig (new file, 8 lines)
@@ -0,0 +1,8 @@
root = true

[*]
indent_style = tab
insert_final_newline = true
max_line_length = 150
tab_width = 4
trim_trailing_whitespace = true
admiral-router/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
.idea
maps/*
data
admiral-router/Dockerfile (new file, 19 lines)
@@ -0,0 +1,19 @@
FROM ghcr.io/project-osrm/osrm-backend:v5.27.1

ARG TYPE
ARG MAP

ENV MAP=${MAP}
ENV TYPE=${TYPE}

COPY maps/${MAP}.osm.pbf /data/${MAP}.osm.pbf
COPY vehicles/${TYPE}.lua /data/${TYPE}.lua

RUN osrm-extract -p /data/${TYPE}.lua /data/${MAP}.osm.pbf && \
    osrm-partition /data/${MAP}.osrm && \
    osrm-customize /data/${MAP}.osrm && \
    rm -f /data/${MAP}.osm.pbf

CMD osrm-routed --algorithm mld --max-table-size=1000000000 --max-viaroute-size=1000000000 --max-trip-size=1000000000 /data/${MAP}.osrm

EXPOSE 5000
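For reference, a minimal sketch of building and running one of these images by hand, assuming a map file already exists under maps/; the profile name, map name and host port below mirror the docker-compose.yml values and are only examples:

```bash
# Build one image per routing profile; TYPE and MAP feed the ARG/ENV pairs above.
docker build \
  --build-arg TYPE=car \
  --build-arg MAP=slovenia-latest \
  -t osrm-backend_car .

# osrm-routed listens on port 5000 inside the container (see EXPOSE above).
docker run --rm -p 5001:5000 osrm-backend_car
```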
admiral-router/INSTALL.sh (new file, 26 lines)
@@ -0,0 +1,26 @@
country=slovenia
port=5000

set -o xtrace
mkdir -p data
cd data

if ! test -f "../maps/${country}-latest.osm.pbf"; then
    curl -kLSs "http://download.geofabrik.de/europe/${country}-latest.osm.pbf" -o "../maps/${country}-latest.osm.pbf"
fi

for profile in bike car ev km kpm mk walk ;
do
    mkdir -p $profile
    cp "../maps/${country}-latest.osm.pbf" "${profile}/${country}-${profile}.osm.pbf"
    cd $profile
    osrm-extract -p "../../vehicles/${profile}.lua" "${country}-${profile}.osm.pbf"
    osrm-partition "${country}-${profile}.osrm"
    osrm-customize "${country}-${profile}.osrm"
    echo "osrm-routed --port=${port} --algorithm mld --max-table-size=1000000000 --max-viaroute-size=1000000000 --max-trip-size=1000000000 ${country}-${profile}.osrm" >> RUN.sh
    (( port++ ))
    cd ..
done

read -p "Press enter to continue"
exit 0
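A minimal usage sketch, assuming the osrm-* binaries are installed locally: the loop above writes a RUN.sh into data/&lt;profile&gt;/ with ports assigned in loop order (bike=5000, car=5001, ev=5002, km=5003, kpm=5004, mk=5005, walk=5006). Note that RUN.sh is appended to (>>), so re-running the installer duplicates the command.

```bash
# Prepare all profiles, then start one of the resulting routers.
bash INSTALL.sh
cd data/car
bash RUN.sh    # runs osrm-routed for the car profile on port 5001
```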
admiral-router/Makefile (new file, 3 lines)
@@ -0,0 +1,3 @@

init:
	wget http://download.geofabrik.de/europe/slovenia-latest.osm.pbf -P ./maps
admiral-router/README.md (new file, 20 lines)
@@ -0,0 +1,20 @@
# osrm-backend-nginx

* bike -> bicycle
* car -> regular car
* ev -> electric vehicle (tricycle, quadricycle)
* km -> moped (kolo z motorjem)
* kpm -> bicycle with auxiliary motor (kolo z pomoznim motorjem)
* mk -> motorcycle (motor kolo)
* walk -> walking

# Run on dev
```bash
# First time and when you want to update the map
make
# First time and when map or .lua files are changed
docker compose build

# Run
docker compose up
```
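Once the containers are up, each profile answers the standard OSRM HTTP API on its own host port (see docker-compose.yml). A sketch of one request follows; the coordinates are arbitrary lon,lat points in Slovenia, and osrm-routed ignores the profile segment of the URL, so the host port is what selects the profile:

```bash
# Route with the bike profile (host port 5000), overview geometry disabled.
curl 'http://localhost:5000/route/v1/bike/14.5058,46.0569;15.6459,46.5547?overview=false'
```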
admiral-router/docker-compose.yml (new file, 82 lines)
@@ -0,0 +1,82 @@
# We create a container for each API endpoint and connect nginx to them to expose a single API for all endpoints
# ARGS:
#   TYPE = (bike, car, ev, km, kpm, mk, walk)
#   MAP  = "slovenia-latest"

services:
  bike:
    build:
      context: .
      args:
        TYPE: bike
        MAP: slovenia-latest
    ports:
      - 5000:5000
    container_name: osrm-backend_bike
    restart: always

  car:
    build:
      context: .
      args:
        TYPE: car
        MAP: slovenia-latest
    ports:
      - 5001:5000
    container_name: osrm-backend_car
    restart: always

  ev:
    build:
      context: .
      args:
        TYPE: ev
        MAP: slovenia-latest
    ports:
      - 5002:5000
    container_name: osrm-backend_ev
    restart: always

  km:
    build:
      context: .
      args:
        TYPE: km
        MAP: slovenia-latest
    ports:
      - 5003:5000
    container_name: osrm-backend_km
    restart: always

  kpm:
    build:
      context: .
      args:
        TYPE: kpm
        MAP: slovenia-latest
    ports:
      - 5004:5000
    container_name: osrm-backend_kpm
    restart: always

  mk:
    build:
      context: .
      args:
        TYPE: mk
        MAP: slovenia-latest
    ports:
      - 5005:5000
    container_name: osrm-backend_mk
    restart: always

  walk:
    build:
      context: .
      args:
        TYPE: walk
        MAP: slovenia-latest
    container_name: osrm-backend_walk
    ports:
      - 5006:5000
    restart: always
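The very large --max-table-size / --max-viaroute-size / --max-trip-size limits in the Dockerfile CMD mainly matter for bulk queries such as the table (many-to-many) service. An illustrative call against the car container published on port 5001, again with example coordinates:

```bash
# Durations and distances between three example points, car profile on port 5001.
curl 'http://localhost:5001/table/v1/car/14.5058,46.0569;15.6459,46.5547;13.7301,45.5481?annotations=duration,distance'
```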
admiral-router/docs/vehicle_profiles.png (binary, new file, 63 KiB)
Binary file not shown.
admiral-router/vehicles/bike.lua (new file, 682 lines)
@@ -0,0 +1,682 @@
|
||||
-- Bicycle profile
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
local max_speed = 30
|
||||
local default_speed = 11
|
||||
local walking_speed = 5
|
||||
|
||||
return {
|
||||
properties = {
|
||||
u_turn_penalty = 20,
|
||||
traffic_light_penalty = 2,
|
||||
--weight_name = 'cyclability',
|
||||
weight_name = 'duration',
|
||||
process_call_tagless_node = false,
|
||||
max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s
|
||||
use_turn_restrictions = false,
|
||||
continue_straight_at_waypoint = false,
|
||||
mode_change_penalty = 30,
|
||||
},
|
||||
|
||||
default_mode = mode.cycling,
|
||||
default_speed = default_speed,
|
||||
walking_speed = walking_speed,
|
||||
oneway_handling = true,
|
||||
turn_penalty = 6,
|
||||
turn_bias = 1.4,
|
||||
use_public_transport = true,
|
||||
|
||||
allowed_start_modes = Set {
|
||||
mode.cycling,
|
||||
mode.pushing_bike
|
||||
},
|
||||
|
||||
barrier_blacklist = Set {
|
||||
'yes',
|
||||
'wall',
|
||||
'fence'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'permissive',
|
||||
'designated'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
-- When a way is tagged with `use_sidepath` a parallel way suitable for
|
||||
-- cyclists is mapped and must be used instead (by law). This tag is
|
||||
-- used on ways that normally may be used by cyclists, but not when
|
||||
-- a signposted parallel cycleway is available. For purposes of routing
|
||||
-- cyclists, this value should be treated as 'no access for bicycles'.
|
||||
'use_sidepath'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set { },
|
||||
|
||||
restricted_highway_whitelist = Set { },
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set { },
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'bicycle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
restrictions = Set {
|
||||
'bicycle'
|
||||
},
|
||||
|
||||
cycleway_tags = Set {
|
||||
'track',
|
||||
'lane',
|
||||
'share_busway',
|
||||
'sharrow',
|
||||
'shared',
|
||||
'shared_lane'
|
||||
},
|
||||
|
||||
opposite_cycleway_tags = Set {
|
||||
'opposite',
|
||||
'opposite_lane',
|
||||
'opposite_track',
|
||||
},
|
||||
|
||||
-- reduce the driving speed by 30% for unsafe roads
|
||||
-- only used for cyclability metric
|
||||
unsafe_highway_list = {
|
||||
primary = 0.5,
|
||||
secondary = 0.65,
|
||||
tertiary = 0.8,
|
||||
primary_link = 0.5,
|
||||
secondary_link = 0.65,
|
||||
tertiary_link = 0.8,
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
},
|
||||
|
||||
bicycle_speeds = {
|
||||
cycleway = default_speed,
|
||||
primary = default_speed,
|
||||
primary_link = default_speed,
|
||||
secondary = default_speed,
|
||||
secondary_link = default_speed,
|
||||
tertiary = default_speed,
|
||||
tertiary_link = default_speed,
|
||||
residential = default_speed,
|
||||
unclassified = default_speed,
|
||||
living_street = default_speed,
|
||||
road = default_speed,
|
||||
service = default_speed,
|
||||
track = default_speed,
|
||||
path = default_speed
|
||||
},
|
||||
|
||||
pedestrian_speeds = {
|
||||
footway = walking_speed,
|
||||
pedestrian = walking_speed,
|
||||
steps = 2
|
||||
},
|
||||
|
||||
railway_speeds = {
|
||||
train = 10,
|
||||
railway = 10,
|
||||
subway = 10,
|
||||
light_rail = 10,
|
||||
monorail = 10,
|
||||
tram = 10
|
||||
},
|
||||
|
||||
platform_speeds = {
|
||||
platform = walking_speed
|
||||
},
|
||||
|
||||
amenity_speeds = {
|
||||
parking = default_speed,
|
||||
parking_entrance = default_speed
|
||||
},
|
||||
|
||||
man_made_speeds = {
|
||||
pier = walking_speed
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
surface_speeds = {
|
||||
asphalt = default_speed,
|
||||
["cobblestone:flattened"] = 10,
|
||||
paving_stones = 10,
|
||||
compacted = 10,
|
||||
cobblestone = 6,
|
||||
unpaved = 6,
|
||||
fine_gravel = 6,
|
||||
gravel = 6,
|
||||
pebblestone = 6,
|
||||
ground = 6,
|
||||
dirt = 6,
|
||||
earth = 6,
|
||||
grass = 6,
|
||||
mud = 3,
|
||||
sand = 3,
|
||||
sett = 10
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'ferry', 'tunnel'
|
||||
},
|
||||
|
||||
-- Which classes should be excludable
|
||||
-- This increases memory usage, so it's disabled by default.
|
||||
excludable = Sequence {
|
||||
-- Set {'ferry'}
|
||||
},
|
||||
|
||||
tracktype_speeds = {
|
||||
},
|
||||
|
||||
smoothness_speeds = {
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'impassable',
|
||||
'construction'
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result)
|
||||
-- parse access and barrier tags
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local is_crossing = highway and highway == "crossing"
|
||||
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access and access ~= "" then
|
||||
-- access restrictions on crossing nodes are not relevant for
|
||||
-- the traffic on the road
|
||||
if profile.access_tag_blacklist[access] and not is_crossing then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier and "" ~= barrier then
|
||||
if profile.barrier_blacklist[barrier] then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if tag and "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function handle_bicycle_tags(profile,way,result,data)
|
||||
-- initial routability check, filters out buildings, boundaries, etc
|
||||
data.route = way:get_value_by_key("route")
|
||||
data.man_made = way:get_value_by_key("man_made")
|
||||
data.railway = way:get_value_by_key("railway")
|
||||
data.amenity = way:get_value_by_key("amenity")
|
||||
data.public_transport = way:get_value_by_key("public_transport")
|
||||
data.bridge = way:get_value_by_key("bridge")
|
||||
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '') and
|
||||
(not profile.use_public_transport or not data.railway or data.railway=='') and
|
||||
(not data.amenity or data.amenity=='') and
|
||||
(not data.man_made or data.man_made=='') and
|
||||
(not data.public_transport or data.public_transport=='') and
|
||||
(not data.bridge or data.bridge=='')
|
||||
then
|
||||
return false
|
||||
end
|
||||
|
||||
-- access
|
||||
data.access = find_access_tag(way, profile.access_tags_hierarchy)
|
||||
if data.access and profile.access_tag_blacklist[data.access] then
|
||||
return false
|
||||
end
|
||||
|
||||
-- other tags
|
||||
data.junction = way:get_value_by_key("junction")
|
||||
data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0
|
||||
data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0
|
||||
data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0
|
||||
data.barrier = way:get_value_by_key("barrier")
|
||||
data.oneway = way:get_value_by_key("oneway")
|
||||
data.oneway_bicycle = way:get_value_by_key("oneway:bicycle")
|
||||
data.cycleway = way:get_value_by_key("cycleway")
|
||||
data.cycleway_left = way:get_value_by_key("cycleway:left")
|
||||
data.cycleway_right = way:get_value_by_key("cycleway:right")
|
||||
data.duration = way:get_value_by_key("duration")
|
||||
data.service = way:get_value_by_key("service")
|
||||
data.foot = way:get_value_by_key("foot")
|
||||
data.foot_forward = way:get_value_by_key("foot:forward")
|
||||
data.foot_backward = way:get_value_by_key("foot:backward")
|
||||
data.bicycle = way:get_value_by_key("bicycle")
|
||||
|
||||
speed_handler(profile,way,result,data)
|
||||
|
||||
oneway_handler(profile,way,result,data)
|
||||
|
||||
cycleway_handler(profile,way,result,data)
|
||||
|
||||
bike_push_handler(profile,way,result,data)
|
||||
|
||||
|
||||
-- maxspeed
|
||||
limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward )
|
||||
|
||||
-- not routable if no speed assigned
|
||||
-- this avoids assertions in debug builds
|
||||
if result.forward_speed <= 0 and result.duration <= 0 then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
if result.backward_speed <= 0 and result.duration <= 0 then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
safety_handler(profile,way,result,data)
|
||||
end
|
||||
|
||||
|
||||
|
||||
function speed_handler(profile,way,result,data)
|
||||
|
||||
data.way_type_allows_pushing = false
|
||||
|
||||
-- speed
|
||||
local bridge_speed = profile.bridge_speeds[data.bridge]
|
||||
if (bridge_speed and bridge_speed > 0) then
|
||||
data.highway = data.bridge
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( parseDuration(data.duration), 1 )
|
||||
end
|
||||
result.forward_speed = bridge_speed
|
||||
result.backward_speed = bridge_speed
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.route_speeds[data.route] then
|
||||
-- ferries (doesn't cover routes tagged using relations)
|
||||
result.forward_mode = mode.ferry
|
||||
result.backward_mode = mode.ferry
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( 1, parseDuration(data.duration) )
|
||||
else
|
||||
result.forward_speed = profile.route_speeds[data.route]
|
||||
result.backward_speed = profile.route_speeds[data.route]
|
||||
end
|
||||
-- railway platforms (old tagging scheme)
|
||||
elseif data.railway and profile.platform_speeds[data.railway] then
|
||||
result.forward_speed = profile.platform_speeds[data.railway]
|
||||
result.backward_speed = profile.platform_speeds[data.railway]
|
||||
data.way_type_allows_pushing = true
|
||||
-- public_transport platforms (new tagging scheme)
|
||||
elseif data.public_transport and profile.platform_speeds[data.public_transport] then
|
||||
result.forward_speed = profile.platform_speeds[data.public_transport]
|
||||
result.backward_speed = profile.platform_speeds[data.public_transport]
|
||||
data.way_type_allows_pushing = true
|
||||
-- railways
|
||||
elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then
|
||||
result.forward_mode = mode.train
|
||||
result.backward_mode = mode.train
|
||||
result.forward_speed = profile.railway_speeds[data.railway]
|
||||
result.backward_speed = profile.railway_speeds[data.railway]
|
||||
elseif data.amenity and profile.amenity_speeds[data.amenity] then
|
||||
-- parking areas
|
||||
result.forward_speed = profile.amenity_speeds[data.amenity]
|
||||
result.backward_speed = profile.amenity_speeds[data.amenity]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.bicycle_speeds[data.highway] then
|
||||
-- regular ways
|
||||
result.forward_speed = profile.bicycle_speeds[data.highway]
|
||||
result.backward_speed = profile.bicycle_speeds[data.highway]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif data.access and profile.access_tag_whitelist[data.access] then
|
||||
-- unknown way, but valid access tag
|
||||
result.forward_speed = profile.default_speed
|
||||
result.backward_speed = profile.default_speed
|
||||
data.way_type_allows_pushing = true
|
||||
end
|
||||
end
|
||||
|
||||
function oneway_handler(profile,way,result,data)
|
||||
-- oneway
|
||||
data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway"
|
||||
data.reverse = false
|
||||
|
||||
if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway_bicycle == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.implied_oneway then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
function cycleway_handler(profile,way,result,data)
|
||||
-- cycleway
|
||||
data.has_cycleway_forward = false
|
||||
data.has_cycleway_backward = false
|
||||
data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway
|
||||
|
||||
-- cycleways on normal roads
|
||||
if data.is_twoway then
|
||||
if data.cycleway and profile.cycleway_tags[data.cycleway] then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
else
|
||||
local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left])
|
||||
local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right])
|
||||
local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left])
|
||||
|
||||
-- set cycleway even though it is a one-way if opposite is tagged
|
||||
if has_twoway_cycleway then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
elseif has_opposite_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_backward = true
|
||||
else
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
elseif has_oneway_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_forward = true
|
||||
else
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
if data.has_cycleway_backward then
|
||||
result.backward_mode = mode.cycling
|
||||
result.backward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
|
||||
if data.has_cycleway_forward then
|
||||
result.forward_mode = mode.cycling
|
||||
result.forward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
end
|
||||
|
||||
function bike_push_handler(profile,way,result,data)
|
||||
-- pushing bikes - if no other mode found
|
||||
if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or
|
||||
result.forward_speed == -1 or result.backward_speed == -1 then
|
||||
if data.foot ~= 'no' then
|
||||
local push_forward_speed = nil
|
||||
local push_backward_speed = nil
|
||||
|
||||
if profile.pedestrian_speeds[data.highway] then
|
||||
push_forward_speed = profile.pedestrian_speeds[data.highway]
|
||||
push_backward_speed = profile.pedestrian_speeds[data.highway]
|
||||
elseif data.man_made and profile.man_made_speeds[data.man_made] then
|
||||
push_forward_speed = profile.man_made_speeds[data.man_made]
|
||||
push_backward_speed = profile.man_made_speeds[data.man_made]
|
||||
else
|
||||
if data.foot == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
elseif data.foot_forward == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
elseif data.foot_backward == 'yes' then
|
||||
push_backward_speed = profile.walking_speed
|
||||
elseif data.way_type_allows_pushing then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.forward_speed = push_forward_speed
|
||||
end
|
||||
if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1) then
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.backward_speed = push_backward_speed
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
-- dismount
|
||||
if data.bicycle == "dismount" then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.forward_speed = profile.walking_speed
|
||||
result.backward_speed = profile.walking_speed
|
||||
end
|
||||
end
|
||||
|
||||
function safety_handler(profile,way,result,data)
|
||||
-- convert duration into cyclability
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
local safety_penalty = profile.unsafe_highway_list[data.highway] or 1.
|
||||
local is_unsafe = safety_penalty < 1
|
||||
|
||||
-- primaries that are one-way are probably huge primaries where the lanes need to be separated
|
||||
if is_unsafe and data.highway == 'primary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.5
|
||||
end
|
||||
if is_unsafe and data.highway == 'secondary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.6
|
||||
end
|
||||
|
||||
local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward
|
||||
local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward
|
||||
local is_undesireable = data.highway == "service" and profile.service_penalties[data.service]
|
||||
local forward_penalty = 1.
|
||||
local backward_penalty = 1.
|
||||
if forward_is_unsafe then
|
||||
forward_penalty = math.min(forward_penalty, safety_penalty)
|
||||
end
|
||||
if backward_is_unsafe then
|
||||
backward_penalty = math.min(backward_penalty, safety_penalty)
|
||||
end
|
||||
|
||||
if is_undesireable then
|
||||
forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service])
|
||||
backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service])
|
||||
end
|
||||
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * forward_penalty
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * backward_penalty
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / forward_penalty
|
||||
end
|
||||
|
||||
if data.highway == "bicycle" then
|
||||
safety_bonus = safety_bonus + 0.2
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / safety_bonus
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
function process_way(profile, way, result)
|
||||
-- the initial filtering of ways based on presence of tags
|
||||
-- affects processing times significantly, because all ways
|
||||
-- have to be checked.
|
||||
-- to increase performance, prefetching and initial tag check
|
||||
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbids access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
|
||||
route = nil,
|
||||
man_made = nil,
|
||||
railway = nil,
|
||||
amenity = nil,
|
||||
public_transport = nil,
|
||||
bridge = nil,
|
||||
|
||||
access = nil,
|
||||
|
||||
junction = nil,
|
||||
maxspeed = nil,
|
||||
maxspeed_forward = nil,
|
||||
maxspeed_backward = nil,
|
||||
barrier = nil,
|
||||
oneway = nil,
|
||||
oneway_bicycle = nil,
|
||||
cycleway = nil,
|
||||
cycleway_left = nil,
|
||||
cycleway_right = nil,
|
||||
duration = nil,
|
||||
service = nil,
|
||||
foot = nil,
|
||||
foot_forward = nil,
|
||||
foot_backward = nil,
|
||||
bicycle = nil,
|
||||
|
||||
way_type_allows_pushing = false,
|
||||
has_cycleway_forward = false,
|
||||
has_cycleway_backward = false,
|
||||
is_twoway = true,
|
||||
reverse = false,
|
||||
implied_oneway = false
|
||||
}
|
||||
|
||||
local handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
|
||||
-- our main handler
|
||||
handle_bicycle_tags,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.surface,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle allowed start/end modes
|
||||
WayHandlers.startpoint,
|
||||
|
||||
-- handle roundabouts
|
||||
WayHandlers.roundabouts,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set classes
|
||||
WayHandlers.classes,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers)
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- compute turn penalty as angle^2, with a left/right bias
|
||||
local normalized_angle = turn.angle / 90.0
|
||||
if normalized_angle >= 0.0 then
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias
|
||||
else
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = turn.duration + profile.properties.traffic_light_penalty
|
||||
end
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then
|
||||
turn.weight = turn.weight + profile.properties.mode_change_penalty
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
admiral-router/vehicles/car.lua (new file, 504 lines)
@@ -0,0 +1,504 @@
|
||||
-- Car profile
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
Relations = require("lib/relations")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Utils = require("lib/utils")
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
return {
|
||||
properties = {
|
||||
max_speed_for_map_matching = 130/3.6, -- 130 km/h -> m/s
|
||||
-- For routing based on duration, but weighted for preferring certain roads
|
||||
-- weight_name = 'routability',
|
||||
-- For shortest duration without penalties for accessibility
|
||||
weight_name = 'duration',
|
||||
-- For shortest distance without penalties for accessibility
|
||||
-- weight_name = 'distance',
|
||||
process_call_tagless_node = false,
|
||||
u_turn_penalty = 20,
|
||||
continue_straight_at_waypoint = true,
|
||||
use_turn_restrictions = true,
|
||||
left_hand_driving = false,
|
||||
traffic_light_penalty = 2,
|
||||
},
|
||||
|
||||
default_mode = mode.driving,
|
||||
default_speed = 28,
|
||||
oneway_handling = true,
|
||||
side_road_multiplier = 0.8,
|
||||
turn_penalty = 7.5,
|
||||
speed_reduction = 0.8,
|
||||
turn_bias = 1.075,
|
||||
cardinal_directions = false,
|
||||
|
||||
-- Size of the vehicle, to be limited by physical restriction of the way
|
||||
vehicle_height = 2.0, -- in meters, 2.0m is the height slightly above biggest SUVs
|
||||
vehicle_width = 1.9, -- in meters, ways with narrow tag are considered narrower than 2.2m
|
||||
|
||||
-- Size of the vehicle, to be limited mostly by legal restriction of the way
|
||||
vehicle_length = 4.8, -- in meters, 4.8m is the length of large or family car
|
||||
vehicle_weight = 2000, -- in kilograms
|
||||
|
||||
-- a list of suffixes to suppress in name change instructions. The suffixes also include common substrings of each other
|
||||
suffix_list = {
|
||||
'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea'
|
||||
},
|
||||
|
||||
barrier_whitelist = Set {
|
||||
'cattle_grid',
|
||||
'border_control',
|
||||
'toll_booth',
|
||||
'sally_port',
|
||||
'gate',
|
||||
'lift_gate',
|
||||
'no',
|
||||
'entrance',
|
||||
'height_restrictor',
|
||||
'arch'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'permissive',
|
||||
'designated',
|
||||
'hov'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
'delivery',
|
||||
'destination'
|
||||
},
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set {
|
||||
'private'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set {
|
||||
'delivery',
|
||||
'destination',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
service_tag_forbidden = Set {
|
||||
},
|
||||
|
||||
restrictions = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle'
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'toll', 'motorway', 'ferry', 'restricted', 'tunnel'
|
||||
},
|
||||
|
||||
-- classes to support for exclude flags
|
||||
excludable = Sequence {
|
||||
Set {'toll'},
|
||||
Set {'motorway'},
|
||||
Set {'ferry'}
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'area',
|
||||
-- 'toll', -- uncomment this to avoid tolls
|
||||
'reversible',
|
||||
'impassable',
|
||||
'hov_lanes',
|
||||
'steps',
|
||||
'construction',
|
||||
'proposed'
|
||||
},
|
||||
|
||||
speeds = Sequence {
|
||||
highway = {
|
||||
motorway = 120,
|
||||
motorway_link = 50,
|
||||
trunk = 90,
|
||||
trunk_link = 40,
|
||||
primary = 80,
|
||||
primary_link = 30,
|
||||
secondary = 70,
|
||||
secondary_link = 30,
|
||||
tertiary = 40,
|
||||
tertiary_link = 30,
|
||||
unclassified = 40,
|
||||
track = 30,
|
||||
residential = 20,
|
||||
living_street = 10,
|
||||
service = 15
|
||||
}
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
parking = 0.5,
|
||||
parking_aisle = 0.5,
|
||||
driveway = 0.5,
|
||||
["drive-through"] = 0.5,
|
||||
["drive-thru"] = 0.5
|
||||
},
|
||||
|
||||
restricted_highway_whitelist = Set {
|
||||
'motorway',
|
||||
'motorway_link',
|
||||
'trunk',
|
||||
'trunk_link',
|
||||
'primary',
|
||||
'primary_link',
|
||||
'secondary',
|
||||
'secondary_link',
|
||||
'tertiary',
|
||||
'tertiary_link',
|
||||
'residential',
|
||||
'living_street',
|
||||
'unclassified',
|
||||
'service',
|
||||
'track'
|
||||
},
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5,
|
||||
shuttle_train = 10
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
-- surface/tracktype/smoothness
|
||||
-- values were estimated from looking at the photos at the relevant wiki pages
|
||||
|
||||
-- max speed for surfaces
|
||||
surface_speeds = {
|
||||
asphalt = nil, -- nil means no limit. removing the line has the same effect
|
||||
concrete = nil,
|
||||
["concrete:plates"] = nil,
|
||||
["concrete:lanes"] = nil,
|
||||
paved = nil,
|
||||
|
||||
cement = 80,
|
||||
compacted = 80,
|
||||
fine_gravel = 80,
|
||||
|
||||
paving_stones = 60,
|
||||
metal = 60,
|
||||
bricks = 60,
|
||||
|
||||
grass = 40,
|
||||
wood = 40,
|
||||
sett = 40,
|
||||
grass_paver = 40,
|
||||
gravel = 40,
|
||||
unpaved = 40,
|
||||
ground = 40,
|
||||
dirt = 40,
|
||||
pebblestone = 40,
|
||||
tartan = 40,
|
||||
|
||||
cobblestone = 30,
|
||||
clay = 30,
|
||||
|
||||
earth = 20,
|
||||
stone = 20,
|
||||
rocky = 20,
|
||||
sand = 20,
|
||||
|
||||
mud = 10
|
||||
},
|
||||
|
||||
-- max speed for tracktypes
|
||||
tracktype_speeds = {
|
||||
grade1 = 60,
|
||||
grade2 = 40,
|
||||
grade3 = 30,
|
||||
grade4 = 25,
|
||||
grade5 = 20
|
||||
},
|
||||
|
||||
-- max speed for smoothnesses
|
||||
smoothness_speeds = {
|
||||
intermediate = 80,
|
||||
bad = 40,
|
||||
very_bad = 20,
|
||||
horrible = 10,
|
||||
very_horrible = 5,
|
||||
impassable = 0
|
||||
},
|
||||
|
||||
-- http://wiki.openstreetmap.org/wiki/Speed_limits
|
||||
maxspeed_table_default = {
|
||||
urban = 50,
|
||||
rural = 90,
|
||||
trunk = 110,
|
||||
motorway = 130
|
||||
},
|
||||
|
||||
-- List only exceptions
|
||||
maxspeed_table = {
|
||||
["at:rural"] = 100,
|
||||
["at:trunk"] = 100,
|
||||
["be:motorway"] = 120,
|
||||
["be-bru:rural"] = 70,
|
||||
["be-bru:urban"] = 30,
|
||||
["be-vlg:rural"] = 70,
|
||||
["by:urban"] = 60,
|
||||
["by:motorway"] = 110,
|
||||
["ch:rural"] = 80,
|
||||
["ch:trunk"] = 100,
|
||||
["ch:motorway"] = 120,
|
||||
["cz:trunk"] = 0,
|
||||
["cz:motorway"] = 0,
|
||||
["de:living_street"] = 7,
|
||||
["de:rural"] = 100,
|
||||
["de:motorway"] = 0,
|
||||
["dk:rural"] = 80,
|
||||
["fr:rural"] = 80,
|
||||
["gb:nsl_single"] = (60*1609)/1000,
|
||||
["gb:nsl_dual"] = (70*1609)/1000,
|
||||
["gb:motorway"] = (70*1609)/1000,
|
||||
["nl:rural"] = 80,
|
||||
["nl:trunk"] = 100,
|
||||
['no:rural'] = 80,
|
||||
['no:motorway'] = 110,
|
||||
['pl:rural'] = 100,
|
||||
['pl:trunk'] = 120,
|
||||
['pl:motorway'] = 140,
|
||||
["ro:trunk"] = 100,
|
||||
["ru:living_street"] = 20,
|
||||
["ru:urban"] = 60,
|
||||
["ru:motorway"] = 110,
|
||||
["uk:nsl_single"] = (60*1609)/1000,
|
||||
["uk:nsl_dual"] = (70*1609)/1000,
|
||||
["uk:motorway"] = (70*1609)/1000,
|
||||
['za:urban'] = 60,
|
||||
['za:rural'] = 100,
|
||||
["none"] = 140
|
||||
},
|
||||
|
||||
relation_types = Sequence {
|
||||
"route"
|
||||
},
|
||||
|
||||
-- classify highway tags when necessary for turn weights
|
||||
highway_turn_classification = {
|
||||
},
|
||||
|
||||
-- classify access tags when necessary for turn weights
|
||||
access_turn_classification = {
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result, relations)
|
||||
-- parse access and barrier tags
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access then
|
||||
if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier then
|
||||
-- check height restriction barriers
|
||||
local restricted_by_height = false
|
||||
if barrier == 'height_restrictor' then
|
||||
local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node)
|
||||
restricted_by_height = maxheight and maxheight < profile.vehicle_height
|
||||
end
|
||||
|
||||
-- make an exception for rising bollard barriers
|
||||
local bollard = node:get_value_by_key("bollard")
|
||||
local rising_bollard = bollard and "rising" == bollard
|
||||
|
||||
-- make an exception for lowered/flat barrier=kerb
|
||||
-- and incorrect tagging of highway crossing kerb as highway barrier
|
||||
local kerb = node:get_value_by_key("kerb")
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb)
|
||||
local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing"
|
||||
|
||||
if not profile.barrier_whitelist[barrier]
|
||||
and not rising_bollard
|
||||
and not flat_kerb
|
||||
and not highway_crossing_kerb
|
||||
or restricted_by_height then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function process_way(profile, way, result, relations)
|
||||
-- the initial filtering of ways based on presence of tags
-- affects processing times significantly, because all ways
-- have to be checked.
-- to increase performance, prefetching and initial tag check
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbids access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
bridge = way:get_value_by_key('bridge'),
|
||||
route = way:get_value_by_key('route')
|
||||
}
|
||||
|
||||
-- perform a quick initial check and abort if the way is
|
||||
-- obviously not routable.
|
||||
-- highway or route tags must be in data table, bridge is optional
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '')
|
||||
then
|
||||
return
|
||||
end
|
||||
|
||||
handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
WayHandlers.avoid_ways,
|
||||
WayHandlers.handle_height,
|
||||
WayHandlers.handle_width,
|
||||
WayHandlers.handle_length,
|
||||
WayHandlers.handle_weight,
|
||||
|
||||
-- determine access status by checking our hierarchy of
|
||||
-- access tags, e.g: motorcar, motor_vehicle, vehicle
|
||||
WayHandlers.access,
|
||||
|
||||
-- check whether forward/backward directions are routable
|
||||
WayHandlers.oneway,
|
||||
|
||||
-- check a road's destination
|
||||
WayHandlers.destinations,
|
||||
|
||||
-- check whether we're using a special transport mode
|
||||
WayHandlers.ferries,
|
||||
WayHandlers.movables,
|
||||
|
||||
-- handle service road restrictions
|
||||
WayHandlers.service,
|
||||
|
||||
-- handle hov
|
||||
WayHandlers.hov,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.speed,
|
||||
WayHandlers.maxspeed,
|
||||
WayHandlers.surface,
|
||||
WayHandlers.penalties,
|
||||
|
||||
-- compute class labels
|
||||
WayHandlers.classes,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.turn_lanes,
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle various other flags
|
||||
WayHandlers.roundabouts,
|
||||
WayHandlers.startpoint,
|
||||
WayHandlers.driving_side,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights,
|
||||
|
||||
-- set classification of ways relevant for turns
|
||||
WayHandlers.way_classification_for_turn
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers, relations)
|
||||
|
||||
if profile.cardinal_directions then
|
||||
Relations.process_way_refs(way, relations, result)
|
||||
end
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- Use a sigmoid function to return a penalty that maxes out at turn_penalty
|
||||
-- over the space of 0-180 degrees. Values here were chosen by fitting
|
||||
-- the function to some turn penalty samples from real driving.
|
||||
local turn_penalty = profile.turn_penalty
|
||||
local turn_bias = turn.is_left_hand_driving and 1. / profile.turn_bias or profile.turn_bias
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = profile.properties.traffic_light_penalty
|
||||
end
|
||||
|
||||
if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then
|
||||
if turn.angle >= 0 then
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias)))
|
||||
else
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias)))
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
end
|
||||
|
||||
-- for distance based routing we don't want to have penalties based on turn angle
|
||||
if profile.properties.weight_name == 'distance' then
|
||||
turn.weight = 0
|
||||
else
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
-- penalize turns from non-local access only segments onto local access only tags
|
||||
if not turn.source_restricted and turn.target_restricted then
|
||||
turn.weight = constants.max_turn_weight
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
admiral-router/vehicles/ev.lua (new file, 504 lines)
@@ -0,0 +1,504 @@
|
||||
-- Electric vehicle profile (based on the car profile)
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
Relations = require("lib/relations")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Utils = require("lib/utils")
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
return {
|
||||
properties = {
|
||||
max_speed_for_map_matching = 45/3.6, -- 45 km/h -> m/s
|
||||
-- For routing based on duration, but weighted for preferring certain roads
|
||||
-- weight_name = 'routability',
|
||||
-- For shortest duration without penalties for accessibility
|
||||
weight_name = 'duration',
|
||||
-- For shortest distance without penalties for accessibility
|
||||
-- weight_name = 'distance',
|
||||
process_call_tagless_node = false,
|
||||
u_turn_penalty = 20,
|
||||
continue_straight_at_waypoint = true,
|
||||
use_turn_restrictions = true,
|
||||
left_hand_driving = false,
|
||||
traffic_light_penalty = 2,
|
||||
},
|
||||
|
||||
default_mode = mode.driving,
|
||||
default_speed = 20,
|
||||
oneway_handling = true,
|
||||
side_road_multiplier = 0.9,
|
||||
turn_penalty = 4,
|
||||
speed_reduction = 0.9,
|
||||
turn_bias = 1.05,
|
||||
cardinal_directions = false,
|
||||
|
||||
-- Size of the vehicle, to be limited by physical restriction of the way
|
||||
vehicle_height = 1.5, -- in meters, 2.0m is the height slightly above biggest SUVs
|
||||
vehicle_width = 1.0, -- in meters, ways with narrow tag are considered narrower than 2.2m
|
||||
|
||||
-- Size of the vehicle, to be limited mostly by legal restriction of the way
|
||||
vehicle_length = 2, -- in meters, 4.8m is the length of large or family car
|
||||
vehicle_weight = 200, -- in kilograms
|
||||
|
||||
-- a list of suffixes to suppress in name change instructions. The suffixes also include common substrings of each other
|
||||
suffix_list = {
|
||||
'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea'
|
||||
},
|
||||
|
||||
barrier_whitelist = Set {
|
||||
'cattle_grid',
|
||||
'border_control',
|
||||
'toll_booth',
|
||||
'sally_port',
|
||||
'gate',
|
||||
'lift_gate',
|
||||
'no',
|
||||
'entrance',
|
||||
'height_restrictor',
|
||||
'arch'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'motorcar',
|
||||
"motorcycle",
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'permissive',
|
||||
'designated',
|
||||
'hov'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
'destination'
|
||||
},
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set {
|
||||
'private'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set {
|
||||
'destination',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
service_tag_forbidden = Set {
|
||||
},
|
||||
|
||||
restrictions = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle'
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'toll', 'motorway', 'ferry', 'restricted', 'tunnel'
|
||||
},
|
||||
|
||||
-- classes to support for exclude flags
|
||||
excludable = Sequence {
|
||||
Set {'toll'},
|
||||
Set {'motorway'},
|
||||
Set {'ferry'}
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'area',
|
||||
-- 'toll', -- uncomment this to avoid tolls
|
||||
'reversible',
|
||||
'impassable',
|
||||
'hov_lanes',
|
||||
'steps',
|
||||
'construction',
|
||||
'proposed'
|
||||
},
|
||||
|
||||
speeds = Sequence {
|
||||
highway = {
|
||||
motorway = 45,
|
||||
motorway_link = 45,
|
||||
trunk = 45,
|
||||
trunk_link = 45,
|
||||
primary = 45,
|
||||
primary_link = 30,
|
||||
secondary = 45,
|
||||
secondary_link = 30,
|
||||
tertiary = 25,
|
||||
tertiary_link = 25,
|
||||
unclassified = 25,
|
||||
track = 20,
|
||||
residential = 14,
|
||||
living_street = 10,
|
||||
service = 10
|
||||
}
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
parking = 0.5,
|
||||
parking_aisle = 0.5,
|
||||
driveway = 0.5,
|
||||
["drive-through"] = 0.5,
|
||||
["drive-thru"] = 0.5
|
||||
},
|
||||
|
||||
restricted_highway_whitelist = Set {
|
||||
'motorway',
|
||||
'motorway_link',
|
||||
'trunk',
|
||||
'trunk_link',
|
||||
'primary',
|
||||
'primary_link',
|
||||
'secondary',
|
||||
'secondary_link',
|
||||
'tertiary',
|
||||
'tertiary_link',
|
||||
'residential',
|
||||
'living_street',
|
||||
'unclassified',
|
||||
'service',
|
||||
'track'
|
||||
},
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5,
|
||||
shuttle_train = 10
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
-- surface/tracktype/smoothness
|
||||
-- values were estimated from looking at the photos at the relevant wiki pages
|
||||
|
||||
-- max speed for surfaces
|
||||
surface_speeds = {
|
||||
asphalt = nil, -- nil means no limit. removing the line has the same effect
|
||||
concrete = nil,
|
||||
["concrete:plates"] = nil,
|
||||
["concrete:lanes"] = nil,
|
||||
paved = nil,
|
||||
|
||||
cement = 80,
|
||||
compacted = 80,
|
||||
fine_gravel = 80,
|
||||
|
||||
paving_stones = 60,
|
||||
metal = 60,
|
||||
bricks = 60,
|
||||
|
||||
grass = 40,
|
||||
wood = 40,
|
||||
sett = 40,
|
||||
grass_paver = 40,
|
||||
gravel = 40,
|
||||
unpaved = 40,
|
||||
ground = 40,
|
||||
dirt = 40,
|
||||
pebblestone = 40,
|
||||
tartan = 40,
|
||||
|
||||
cobblestone = 30,
|
||||
clay = 30,
|
||||
|
||||
earth = 20,
|
||||
stone = 20,
|
||||
rocky = 20,
|
||||
sand = 20,
|
||||
|
||||
mud = 10
|
||||
},
|
||||
|
||||
-- max speed for tracktypes
|
||||
tracktype_speeds = {
|
||||
grade1 = 60,
|
||||
grade2 = 40,
|
||||
grade3 = 30,
|
||||
grade4 = 25,
|
||||
grade5 = 20
|
||||
},
|
||||
|
||||
-- max speed for smoothnesses
|
||||
smoothness_speeds = {
|
||||
intermediate = 80,
|
||||
bad = 40,
|
||||
very_bad = 20,
|
||||
horrible = 10,
|
||||
very_horrible = 5,
|
||||
impassable = 0
|
||||
},
|
||||
|
||||
-- http://wiki.openstreetmap.org/wiki/Speed_limits
|
||||
maxspeed_table_default = {
|
||||
urban = 50,
|
||||
rural = 90,
|
||||
trunk = 100,
|
||||
motorway = 100
|
||||
},
|
||||
|
||||
-- List only exceptions
|
||||
maxspeed_table = {
|
||||
["at:rural"] = 100,
|
||||
["at:trunk"] = 100,
|
||||
["be:motorway"] = 120,
|
||||
["be-bru:rural"] = 70,
|
||||
["be-bru:urban"] = 30,
|
||||
["be-vlg:rural"] = 70,
|
||||
["by:urban"] = 60,
|
||||
["by:motorway"] = 100,
|
||||
["ch:rural"] = 80,
|
||||
["ch:trunk"] = 100,
|
||||
["ch:motorway"] = 100,
|
||||
["cz:trunk"] = 0,
|
||||
["cz:motorway"] = 0,
|
||||
["de:living_street"] = 7,
|
||||
["de:rural"] = 100,
|
||||
["de:motorway"] = 0,
|
||||
["dk:rural"] = 80,
|
||||
["fr:rural"] = 80,
|
||||
["gb:nsl_single"] = (60*1609)/1000,
|
||||
["gb:nsl_dual"] = (70*1609)/1000,
|
||||
["gb:motorway"] = (70*1609)/1000,
|
||||
["nl:rural"] = 80,
|
||||
["nl:trunk"] = 100,
|
||||
['no:rural'] = 80,
|
||||
['no:motorway'] = 100,
|
||||
['pl:rural'] = 100,
|
||||
['pl:trunk'] = 100,
|
||||
['pl:motorway'] = 100,
|
||||
["ro:trunk"] = 100,
|
||||
["ru:living_street"] = 20,
|
||||
["ru:urban"] = 60,
|
||||
["ru:motorway"] = 100,
|
||||
["uk:nsl_single"] = (60*1609)/1000,
|
||||
["uk:nsl_dual"] = (70*1609)/1000,
|
||||
["uk:motorway"] = (70*1609)/1000,
|
||||
['za:urban'] = 60,
|
||||
['za:rural'] = 100,
|
||||
["none"] = 100
|
||||
},
|
||||
|
||||
relation_types = Sequence {
|
||||
"route"
|
||||
},
|
||||
|
||||
-- classify highway tags when necessary for turn weights
|
||||
highway_turn_classification = {
|
||||
},
|
||||
|
||||
-- classify access tags when necessary for turn weights
|
||||
access_turn_classification = {
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result, relations)
|
||||
-- parse access and barrier tags
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access then
|
||||
if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier then
|
||||
-- check height restriction barriers
|
||||
local restricted_by_height = false
|
||||
if barrier == 'height_restrictor' then
|
||||
local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node)
|
||||
restricted_by_height = maxheight and maxheight < profile.vehicle_height
|
||||
end
|
||||
|
||||
-- make an exception for rising bollard barriers
|
||||
local bollard = node:get_value_by_key("bollard")
|
||||
local rising_bollard = bollard and "rising" == bollard
|
||||
|
||||
-- make an exception for lowered/flat barrier=kerb
|
||||
-- and incorrect tagging of highway crossing kerb as highway barrier
|
||||
local kerb = node:get_value_by_key("kerb")
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb)
|
||||
local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing"
|
||||
|
||||
if not profile.barrier_whitelist[barrier]
|
||||
and not rising_bollard
|
||||
and not flat_kerb
|
||||
and not highway_crossing_kerb
|
||||
or restricted_by_height then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function process_way(profile, way, result, relations)
|
||||
-- the initial filtering of ways based on presence of tags
-- affects processing times significantly, because all ways
-- have to be checked.
-- to increase performance, prefetching and initial tag check
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbids access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
bridge = way:get_value_by_key('bridge'),
|
||||
route = way:get_value_by_key('route')
|
||||
}
|
||||
|
||||
-- perform a quick initial check and abort if the way is
|
||||
-- obviously not routable.
|
||||
-- highway or route tags must be in data table, bridge is optional
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '')
|
||||
then
|
||||
return
|
||||
end
|
||||
|
||||
handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
WayHandlers.avoid_ways,
|
||||
WayHandlers.handle_height,
|
||||
WayHandlers.handle_width,
|
||||
WayHandlers.handle_length,
|
||||
WayHandlers.handle_weight,
|
||||
|
||||
-- determine access status by checking our hierarchy of
|
||||
-- access tags, e.g: motorcar, motor_vehicle, vehicle
|
||||
WayHandlers.access,
|
||||
|
||||
-- check whether forward/backward directions are routable
|
||||
WayHandlers.oneway,
|
||||
|
||||
-- check a road's destination
|
||||
WayHandlers.destinations,
|
||||
|
||||
-- check whether we're using a special transport mode
|
||||
WayHandlers.ferries,
|
||||
WayHandlers.movables,
|
||||
|
||||
-- handle service road restrictions
|
||||
WayHandlers.service,
|
||||
|
||||
-- handle hov
|
||||
WayHandlers.hov,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.speed,
|
||||
WayHandlers.maxspeed,
|
||||
WayHandlers.surface,
|
||||
WayHandlers.penalties,
|
||||
|
||||
-- compute class labels
|
||||
WayHandlers.classes,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.turn_lanes,
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle various other flags
|
||||
WayHandlers.roundabouts,
|
||||
WayHandlers.startpoint,
|
||||
WayHandlers.driving_side,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights,
|
||||
|
||||
-- set classification of ways relevant for turns
|
||||
WayHandlers.way_classification_for_turn
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers, relations)
|
||||
|
||||
if profile.cardinal_directions then
|
||||
Relations.process_way_refs(way, relations, result)
|
||||
end
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- Use a sigmoid function to return a penalty that maxes out at turn_penalty
|
||||
-- over the space of 0-180 degrees. Values here were chosen by fitting
|
||||
-- the function to some turn penalty samples from real driving.
|
||||
local turn_penalty = profile.turn_penalty
|
||||
local turn_bias = turn.is_left_hand_driving and 1. / profile.turn_bias or profile.turn_bias
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = profile.properties.traffic_light_penalty
|
||||
end
|
||||
|
||||
if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then
|
||||
if turn.angle >= 0 then
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias)))
|
||||
else
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias)))
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
end
|
||||
|
||||
-- for distance based routing we don't want to have penalties based on turn angle
|
||||
if profile.properties.weight_name == 'distance' then
|
||||
turn.weight = 0
|
||||
else
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
-- penalize turns from non-local access only segments onto local access only tags
|
||||
if not turn.source_restricted and turn.target_restricted then
|
||||
turn.weight = constants.max_turn_weight
|
||||
end
|
||||
end
|
||||
end
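For orientation, here is a minimal standalone sketch of the sigmoid penalty used in process_turn above. The turn_penalty and turn_bias values below are assumptions chosen only for illustration; the real values come from this profile's setup().

```lua
-- Standalone sketch of the sigmoid turn penalty above (illustrative values only).
local turn_penalty = 7.5   -- assumed; the real value is defined in setup()
local turn_bias    = 1.075 -- assumed; values above 1 bias against one turning direction

local function penalty(angle)
  if angle >= 0 then
    return turn_penalty / (1 + math.exp(-((13 / turn_bias) * angle / 180 - 6.5 * turn_bias)))
  else
    return turn_penalty / (1 + math.exp(-((13 * turn_bias) * -angle / 180 - 6.5 / turn_bias)))
  end
end

for _, a in ipairs({ 0, 45, 90, 135, 180 }) do
  print(a, penalty(a))  -- rises smoothly and saturates near turn_penalty at 180 degrees
end
```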
|
||||
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
683
admiral-router/vehicles/km.lua
Normal file
@ -0,0 +1,683 @@
|
||||
-- Bicycle profile
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
local max_speed = 50
|
||||
local default_speed = 22
|
||||
local walking_speed = 5
|
||||
|
||||
return {
|
||||
properties = {
|
||||
u_turn_penalty = 20,
|
||||
traffic_light_penalty = 2,
|
||||
--weight_name = 'cyclability',
|
||||
weight_name = 'duration',
|
||||
-- weight_name = 'distance',
|
||||
process_call_tagless_node = false,
|
||||
max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s
|
||||
use_turn_restrictions = false,
|
||||
continue_straight_at_waypoint = false,
|
||||
mode_change_penalty = 30,
|
||||
},
|
||||
|
||||
default_mode = mode.cycling,
|
||||
default_speed = default_speed,
|
||||
walking_speed = walking_speed,
|
||||
oneway_handling = true,
|
||||
turn_penalty = 6,
|
||||
turn_bias = 1.4,
|
||||
use_public_transport = true,
|
||||
|
||||
allowed_start_modes = Set {
|
||||
mode.cycling,
|
||||
mode.pushing_bike
|
||||
},
|
||||
|
||||
barrier_blacklist = Set {
|
||||
'yes',
|
||||
'wall',
|
||||
'fence'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'permissive',
|
||||
'designated'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
-- When a way is tagged with `use_sidepath` a parallel way suitable for
|
||||
-- cyclists is mapped and must be used instead (by law). This tag is
|
||||
-- used on ways that normally may be used by cyclists, but not when
|
||||
-- a signposted parallel cycleway is available. For purposes of routing
|
||||
-- cyclists, this value should be treated as 'no access for bicycles'.
|
||||
'use_sidepath'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set { },
|
||||
|
||||
restricted_highway_whitelist = Set { },
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set { },
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'bicycle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
restrictions = Set {
|
||||
'bicycle'
|
||||
},
|
||||
|
||||
cycleway_tags = Set {
|
||||
'track',
|
||||
'lane',
|
||||
'share_busway',
|
||||
'sharrow',
|
||||
'shared',
|
||||
'shared_lane'
|
||||
},
|
||||
|
||||
opposite_cycleway_tags = Set {
|
||||
'opposite',
|
||||
'opposite_lane',
|
||||
'opposite_track',
|
||||
},
|
||||
|
||||
-- reduce the speed for unsafe roads using the multipliers below
|
||||
-- only used for cyclability metric
|
||||
unsafe_highway_list = {
|
||||
primary = 0.5,
|
||||
secondary = 0.65,
|
||||
tertiary = 0.8,
|
||||
primary_link = 0.5,
|
||||
secondary_link = 0.65,
|
||||
tertiary_link = 0.8,
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
},
|
||||
|
||||
bicycle_speeds = {
|
||||
cycleway = default_speed,
|
||||
primary = 45,
|
||||
primary_link = 30,
|
||||
secondary = 45,
|
||||
secondary_link = 30,
|
||||
tertiary = 25,
|
||||
tertiary_link = 25,
|
||||
residential = 14,
|
||||
unclassified = 25,
|
||||
living_street = 10,
|
||||
road = default_speed,
|
||||
service = 10,
|
||||
track = 12,
|
||||
path = 12
|
||||
},
|
||||
|
||||
pedestrian_speeds = {
|
||||
footway = walking_speed,
|
||||
pedestrian = walking_speed,
|
||||
steps = 2
|
||||
},
|
||||
|
||||
railway_speeds = {
|
||||
train = 10,
|
||||
railway = 10,
|
||||
subway = 10,
|
||||
light_rail = 10,
|
||||
monorail = 10,
|
||||
tram = 10
|
||||
},
|
||||
|
||||
platform_speeds = {
|
||||
platform = walking_speed
|
||||
},
|
||||
|
||||
amenity_speeds = {
|
||||
parking = 10,
|
||||
parking_entrance = 10
|
||||
},
|
||||
|
||||
man_made_speeds = {
|
||||
pier = walking_speed
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
surface_speeds = {
|
||||
asphalt = default_speed,
|
||||
["cobblestone:flattened"] = 10,
|
||||
paving_stones = 10,
|
||||
compacted = 10,
|
||||
cobblestone = 6,
|
||||
unpaved = 6,
|
||||
fine_gravel = 6,
|
||||
gravel = 6,
|
||||
pebblestone = 6,
|
||||
ground = 6,
|
||||
dirt = 6,
|
||||
earth = 6,
|
||||
grass = 6,
|
||||
mud = 3,
|
||||
sand = 3,
|
||||
sett = 10
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'ferry', 'tunnel'
|
||||
},
|
||||
|
||||
-- Which classes should be excludable
|
||||
-- This increases memory usage, so it's disabled by default.
|
||||
excludable = Sequence {
|
||||
-- Set {'ferry'}
|
||||
},
|
||||
|
||||
tracktype_speeds = {
|
||||
},
|
||||
|
||||
smoothness_speeds = {
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'impassable',
|
||||
'construction'
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result)
|
||||
-- parse access and barrier tags
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local is_crossing = highway and highway == "crossing"
|
||||
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access and access ~= "" then
|
||||
-- access restrictions on crossing nodes are not relevant for
|
||||
-- the traffic on the road
|
||||
if profile.access_tag_blacklist[access] and not is_crossing then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier and "" ~= barrier then
|
||||
if profile.barrier_blacklist[barrier] then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if tag and "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function handle_bicycle_tags(profile,way,result,data)
|
||||
-- initial routability check, filters out buildings, boundaries, etc
|
||||
data.route = way:get_value_by_key("route")
|
||||
data.man_made = way:get_value_by_key("man_made")
|
||||
data.railway = way:get_value_by_key("railway")
|
||||
data.amenity = way:get_value_by_key("amenity")
|
||||
data.public_transport = way:get_value_by_key("public_transport")
|
||||
data.bridge = way:get_value_by_key("bridge")
|
||||
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '') and
|
||||
(not profile.use_public_transport or not data.railway or data.railway=='') and
|
||||
(not data.amenity or data.amenity=='') and
|
||||
(not data.man_made or data.man_made=='') and
|
||||
(not data.public_transport or data.public_transport=='') and
|
||||
(not data.bridge or data.bridge=='')
|
||||
then
|
||||
return false
|
||||
end
|
||||
|
||||
-- access
|
||||
data.access = find_access_tag(way, profile.access_tags_hierarchy)
|
||||
if data.access and profile.access_tag_blacklist[data.access] then
|
||||
return false
|
||||
end
|
||||
|
||||
-- other tags
|
||||
data.junction = way:get_value_by_key("junction")
|
||||
data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0
|
||||
data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0
|
||||
data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0
|
||||
data.barrier = way:get_value_by_key("barrier")
|
||||
data.oneway = way:get_value_by_key("oneway")
|
||||
data.oneway_bicycle = way:get_value_by_key("oneway:bicycle")
|
||||
data.cycleway = way:get_value_by_key("cycleway")
|
||||
data.cycleway_left = way:get_value_by_key("cycleway:left")
|
||||
data.cycleway_right = way:get_value_by_key("cycleway:right")
|
||||
data.duration = way:get_value_by_key("duration")
|
||||
data.service = way:get_value_by_key("service")
|
||||
data.foot = way:get_value_by_key("foot")
|
||||
data.foot_forward = way:get_value_by_key("foot:forward")
|
||||
data.foot_backward = way:get_value_by_key("foot:backward")
|
||||
data.bicycle = way:get_value_by_key("bicycle")
|
||||
|
||||
speed_handler(profile,way,result,data)
|
||||
|
||||
oneway_handler(profile,way,result,data)
|
||||
|
||||
cycleway_handler(profile,way,result,data)
|
||||
|
||||
bike_push_handler(profile,way,result,data)
|
||||
|
||||
|
||||
-- maxspeed
|
||||
limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward )
|
||||
|
||||
-- not routable if no speed assigned
|
||||
-- this avoids assertions in debug builds
|
||||
if result.forward_speed <= 0 and result.duration <= 0 then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
if result.backward_speed <= 0 and result.duration <= 0 then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
safety_handler(profile,way,result,data)
|
||||
end
|
||||
|
||||
|
||||
|
||||
function speed_handler(profile,way,result,data)
|
||||
|
||||
data.way_type_allows_pushing = false
|
||||
|
||||
-- speed
|
||||
local bridge_speed = profile.bridge_speeds[data.bridge]
|
||||
if (bridge_speed and bridge_speed > 0) then
|
||||
data.highway = data.bridge
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( parseDuration(data.duration), 1 )
|
||||
end
|
||||
result.forward_speed = bridge_speed
|
||||
result.backward_speed = bridge_speed
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.route_speeds[data.route] then
|
||||
-- ferries (doesn't cover routes tagged using relations)
|
||||
result.forward_mode = mode.ferry
|
||||
result.backward_mode = mode.ferry
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( 1, parseDuration(data.duration) )
|
||||
else
|
||||
result.forward_speed = profile.route_speeds[data.route]
|
||||
result.backward_speed = profile.route_speeds[data.route]
|
||||
end
|
||||
-- railway platforms (old tagging scheme)
|
||||
elseif data.railway and profile.platform_speeds[data.railway] then
|
||||
result.forward_speed = profile.platform_speeds[data.railway]
|
||||
result.backward_speed = profile.platform_speeds[data.railway]
|
||||
data.way_type_allows_pushing = true
|
||||
-- public_transport platforms (new tagging scheme)
|
||||
elseif data.public_transport and profile.platform_speeds[data.public_transport] then
|
||||
result.forward_speed = profile.platform_speeds[data.public_transport]
|
||||
result.backward_speed = profile.platform_speeds[data.public_transport]
|
||||
data.way_type_allows_pushing = true
|
||||
-- railways
|
||||
elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then
|
||||
result.forward_mode = mode.train
|
||||
result.backward_mode = mode.train
|
||||
result.forward_speed = profile.railway_speeds[data.railway]
|
||||
result.backward_speed = profile.railway_speeds[data.railway]
|
||||
elseif data.amenity and profile.amenity_speeds[data.amenity] then
|
||||
-- parking areas
|
||||
result.forward_speed = profile.amenity_speeds[data.amenity]
|
||||
result.backward_speed = profile.amenity_speeds[data.amenity]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.bicycle_speeds[data.highway] then
|
||||
-- regular ways
|
||||
result.forward_speed = profile.bicycle_speeds[data.highway]
|
||||
result.backward_speed = profile.bicycle_speeds[data.highway]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif data.access and profile.access_tag_whitelist[data.access] then
|
||||
-- unknown way, but valid access tag
|
||||
result.forward_speed = profile.default_speed
|
||||
result.backward_speed = profile.default_speed
|
||||
data.way_type_allows_pushing = true
|
||||
end
|
||||
end
|
||||
|
||||
function oneway_handler(profile,way,result,data)
|
||||
-- oneway
|
||||
data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway"
|
||||
data.reverse = false
|
||||
|
||||
if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway_bicycle == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.implied_oneway then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
function cycleway_handler(profile,way,result,data)
|
||||
-- cycleway
|
||||
data.has_cycleway_forward = false
|
||||
data.has_cycleway_backward = false
|
||||
data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway
|
||||
|
||||
-- cycleways on normal roads
|
||||
if data.is_twoway then
|
||||
if data.cycleway and profile.cycleway_tags[data.cycleway] then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
else
|
||||
local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left])
|
||||
local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right])
|
||||
local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left])
|
||||
|
||||
-- set cycleway even though it is a one-way if the opposite direction is tagged
|
||||
if has_twoway_cycleway then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
elseif has_opposite_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_backward = true
|
||||
else
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
elseif has_oneway_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_forward = true
|
||||
else
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
if data.has_cycleway_backward then
|
||||
result.backward_mode = mode.cycling
|
||||
result.backward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
|
||||
if data.has_cycleway_forward then
|
||||
result.forward_mode = mode.cycling
|
||||
result.forward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
end
|
||||
|
||||
function bike_push_handler(profile,way,result,data)
|
||||
-- pushing bikes - if no other mode found
|
||||
if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or
|
||||
result.forward_speed == -1 or result.backward_speed == -1 then
|
||||
if data.foot ~= 'no' then
|
||||
local push_forward_speed = nil
|
||||
local push_backward_speed = nil
|
||||
|
||||
if profile.pedestrian_speeds[data.highway] then
|
||||
push_forward_speed = profile.pedestrian_speeds[data.highway]
|
||||
push_backward_speed = profile.pedestrian_speeds[data.highway]
|
||||
elseif data.man_made and profile.man_made_speeds[data.man_made] then
|
||||
push_forward_speed = profile.man_made_speeds[data.man_made]
|
||||
push_backward_speed = profile.man_made_speeds[data.man_made]
|
||||
else
|
||||
if data.foot == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
elseif data.foot_forward == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
elseif data.foot_backward == 'yes' then
|
||||
push_backward_speed = profile.walking_speed
|
||||
elseif data.way_type_allows_pushing then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.forward_speed = push_forward_speed
|
||||
end
|
||||
if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1)then
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.backward_speed = push_backward_speed
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
-- dismount
|
||||
if data.bicycle == "dismount" then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.forward_speed = profile.walking_speed
|
||||
result.backward_speed = profile.walking_speed
|
||||
end
|
||||
end
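A simplified, self-contained restatement of the fallback above (not wired into OSRM; the tag values are illustrative): when riding in one direction is blocked but foot access is not forbidden, that direction drops to pushing_bike at walking speed.

```lua
-- Simplified sketch of bike_push_handler's fallback (illustrative, no OSRM runtime).
local walking_speed = 5          -- km/h, as in setup()
local foot = 'yes'               -- assumed foot tag on the way
local backward_mode, backward_speed = 'inaccessible', -1

if foot ~= 'no' and (backward_mode == 'inaccessible' or backward_speed == -1) then
  backward_mode, backward_speed = 'pushing_bike', walking_speed
end

print(backward_mode, backward_speed)  --> pushing_bike   5
```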
|
||||
|
||||
function safety_handler(profile,way,result,data)
|
||||
-- convert duration into cyclability
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
local safety_penalty = profile.unsafe_highway_list[data.highway] or 1.
|
||||
local is_unsafe = safety_penalty < 1
|
||||
|
||||
-- primaries that are one-way are probably large primaries where the lanes need to be separated
|
||||
if is_unsafe and data.highway == 'primary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.5
|
||||
end
|
||||
if is_unsafe and data.highway == 'secondary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.6
|
||||
end
|
||||
|
||||
local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward
|
||||
local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward
|
||||
local is_undesireable = data.highway == "service" and profile.service_penalties[data.service]
|
||||
local forward_penalty = 1.
|
||||
local backward_penalty = 1.
|
||||
if forward_is_unsafe then
|
||||
forward_penalty = math.min(forward_penalty, safety_penalty)
|
||||
end
|
||||
if backward_is_unsafe then
|
||||
backward_penalty = math.min(backward_penalty, safety_penalty)
|
||||
end
|
||||
|
||||
if is_undesireable then
|
||||
forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service])
|
||||
backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service])
|
||||
end
|
||||
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * forward_penalty
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * backward_penalty
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / forward_penalty
|
||||
end
|
||||
|
||||
if data.highway == "bicycle" then
|
||||
local safety_bonus = 1. + 0.2 -- base factor of 1.0 assumed so the bonus has a defined starting value
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / safety_bonus
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
function process_way(profile, way, result)
|
||||
-- the initial filtering of ways based on presence of tags
|
||||
-- affects processing times significantly, because all ways
|
||||
-- have to be checked.
|
||||
-- to increase performance, prefetching and initial tag check
|
||||
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbid access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
|
||||
route = nil,
|
||||
man_made = nil,
|
||||
railway = nil,
|
||||
amenity = nil,
|
||||
public_transport = nil,
|
||||
bridge = nil,
|
||||
|
||||
access = nil,
|
||||
|
||||
junction = nil,
|
||||
maxspeed = nil,
|
||||
maxspeed_forward = nil,
|
||||
maxspeed_backward = nil,
|
||||
barrier = nil,
|
||||
oneway = nil,
|
||||
oneway_bicycle = nil,
|
||||
cycleway = nil,
|
||||
cycleway_left = nil,
|
||||
cycleway_right = nil,
|
||||
duration = nil,
|
||||
service = nil,
|
||||
foot = nil,
|
||||
foot_forward = nil,
|
||||
foot_backward = nil,
|
||||
bicycle = nil,
|
||||
|
||||
way_type_allows_pushing = false,
|
||||
has_cycleway_forward = false,
|
||||
has_cycleway_backward = false,
|
||||
is_twoway = true,
|
||||
reverse = false,
|
||||
implied_oneway = false
|
||||
}
|
||||
|
||||
local handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
|
||||
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
|
||||
-- our main handler
|
||||
handle_bicycle_tags,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.surface,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle allowed start/end modes
|
||||
WayHandlers.startpoint,
|
||||
|
||||
-- handle roundabouts
|
||||
WayHandlers.roundabouts,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set classes
|
||||
WayHandlers.classes,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers)
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- compute turn penalty as angle^2, with a left/right bias
|
||||
local normalized_angle = turn.angle / 90.0
|
||||
if normalized_angle >= 0.0 then
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias
|
||||
else
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = turn.duration + profile.properties.traffic_light_penalty
|
||||
end
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then
|
||||
turn.weight = turn.weight + profile.properties.mode_change_penalty
|
||||
end
|
||||
end
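For reference, a standalone evaluation of the quadratic penalty above, using the turn_penalty = 6 and turn_bias = 1.4 values this profile sets in setup():

```lua
-- Standalone sketch of the angle^2 turn penalty above.
local turn_penalty, turn_bias = 6, 1.4  -- as set in this profile's setup()

local function duration_for(angle)
  local n = angle / 90.0
  if n >= 0.0 then
    return n * n * turn_penalty / turn_bias   -- positive angles: divided by the bias
  else
    return n * n * turn_penalty * turn_bias   -- negative angles: multiplied by the bias
  end
end

print(duration_for(90), duration_for(-90))  --> roughly 4.29 and 8.4 seconds
```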
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
687
admiral-router/vehicles/kpm.lua
Normal file
@ -0,0 +1,687 @@
|
||||
-- Bicycle profile
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
local max_speed = 25
|
||||
local default_speed = 17
|
||||
local walking_speed = 5
|
||||
|
||||
return {
|
||||
properties = {
|
||||
u_turn_penalty = 20,
|
||||
traffic_light_penalty = 2,
|
||||
--weight_name = 'cyclability',
|
||||
weight_name = 'duration',
|
||||
-- weight_name = 'distance',
|
||||
process_call_tagless_node = false,
|
||||
max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s
|
||||
use_turn_restrictions = false,
|
||||
continue_straight_at_waypoint = false,
|
||||
mode_change_penalty = 30,
|
||||
},
|
||||
|
||||
default_mode = mode.cycling,
|
||||
default_speed = default_speed,
|
||||
walking_speed = walking_speed,
|
||||
oneway_handling = true,
|
||||
turn_penalty = 6,
|
||||
turn_bias = 1.4,
|
||||
use_public_transport = true,
|
||||
|
||||
allowed_start_modes = Set {
|
||||
mode.cycling,
|
||||
mode.pushing_bike
|
||||
},
|
||||
|
||||
barrier_blacklist = Set {
|
||||
'yes',
|
||||
'wall',
|
||||
'fence'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'permissive',
|
||||
'designated'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
'private',
|
||||
'agricultural',
|
||||
'forestry',
|
||||
'delivery',
|
||||
-- When a way is tagged with `use_sidepath` a parallel way suitable for
|
||||
-- cyclists is mapped and must be used instead (by law). This tag is
|
||||
-- used on ways that normally may be used by cyclists, but not when
|
||||
-- a signposted parallel cycleway is available. For purposes of routing
|
||||
-- cyclists, this value should be treated as 'no access for bicycles'.
|
||||
'use_sidepath'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set { },
|
||||
|
||||
restricted_highway_whitelist = Set { },
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set { },
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'bicycle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
restrictions = Set {
|
||||
'bicycle'
|
||||
},
|
||||
|
||||
cycleway_tags = Set {
|
||||
'track',
|
||||
'lane',
|
||||
'share_busway',
|
||||
'sharrow',
|
||||
'shared',
|
||||
'shared_lane'
|
||||
},
|
||||
|
||||
opposite_cycleway_tags = Set {
|
||||
'opposite',
|
||||
'opposite_lane',
|
||||
'opposite_track',
|
||||
},
|
||||
|
||||
-- reduce the speed for unsafe roads using the multipliers below
|
||||
-- only used for cyclability metric
|
||||
unsafe_highway_list = {
|
||||
primary = 0.5,
|
||||
secondary = 0.65,
|
||||
tertiary = 0.8,
|
||||
primary_link = 0.5,
|
||||
secondary_link = 0.65,
|
||||
tertiary_link = 0.8,
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
},
|
||||
|
||||
bicycle_speeds = {
|
||||
cycleway = default_speed,
|
||||
primary = 25,
|
||||
primary_link = 25,
|
||||
secondary = 25,
|
||||
secondary_link = 25,
|
||||
tertiary = 20,
|
||||
tertiary_link = 20,
|
||||
residential = 12,
|
||||
unclassified = 20,
|
||||
living_street = 10,
|
||||
road = default_speed,
|
||||
service = 10,
|
||||
track = 12,
|
||||
path = 12
|
||||
},
|
||||
|
||||
pedestrian_speeds = {
|
||||
footway = walking_speed,
|
||||
pedestrian = walking_speed,
|
||||
steps = 2
|
||||
},
|
||||
|
||||
railway_speeds = {
|
||||
train = 10,
|
||||
railway = 10,
|
||||
subway = 10,
|
||||
light_rail = 10,
|
||||
monorail = 10,
|
||||
tram = 10
|
||||
},
|
||||
|
||||
platform_speeds = {
|
||||
platform = walking_speed
|
||||
},
|
||||
|
||||
amenity_speeds = {
|
||||
parking = 10,
|
||||
parking_entrance = 10
|
||||
},
|
||||
|
||||
man_made_speeds = {
|
||||
pier = walking_speed
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
surface_speeds = {
|
||||
asphalt = default_speed,
|
||||
["cobblestone:flattened"] = 10,
|
||||
paving_stones = 10,
|
||||
compacted = 10,
|
||||
cobblestone = 6,
|
||||
unpaved = 6,
|
||||
fine_gravel = 6,
|
||||
gravel = 6,
|
||||
pebblestone = 6,
|
||||
ground = 6,
|
||||
dirt = 6,
|
||||
earth = 6,
|
||||
grass = 6,
|
||||
mud = 3,
|
||||
sand = 3,
|
||||
sett = 10
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'ferry', 'tunnel'
|
||||
},
|
||||
|
||||
-- Which classes should be excludable
|
||||
-- This increases memory usage, so it's disabled by default.
|
||||
excludable = Sequence {
|
||||
-- Set {'ferry'}
|
||||
},
|
||||
|
||||
tracktype_speeds = {
|
||||
},
|
||||
|
||||
smoothness_speeds = {
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'impassable',
|
||||
'construction'
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result)
|
||||
-- parse access and barrier tags
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local is_crossing = highway and highway == "crossing"
|
||||
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access and access ~= "" then
|
||||
-- access restrictions on crossing nodes are not relevant for
|
||||
-- the traffic on the road
|
||||
if profile.access_tag_blacklist[access] and not is_crossing then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier and "" ~= barrier then
|
||||
if profile.barrier_blacklist[barrier] then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if tag and "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function handle_bicycle_tags(profile,way,result,data)
|
||||
-- initial routability check, filters out buildings, boundaries, etc
|
||||
data.route = way:get_value_by_key("route")
|
||||
data.man_made = way:get_value_by_key("man_made")
|
||||
data.railway = way:get_value_by_key("railway")
|
||||
data.amenity = way:get_value_by_key("amenity")
|
||||
data.public_transport = way:get_value_by_key("public_transport")
|
||||
data.bridge = way:get_value_by_key("bridge")
|
||||
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '') and
|
||||
(not profile.use_public_transport or not data.railway or data.railway=='') and
|
||||
(not data.amenity or data.amenity=='') and
|
||||
(not data.man_made or data.man_made=='') and
|
||||
(not data.public_transport or data.public_transport=='') and
|
||||
(not data.bridge or data.bridge=='')
|
||||
then
|
||||
return false
|
||||
end
|
||||
|
||||
-- access
|
||||
data.access = find_access_tag(way, profile.access_tags_hierarchy)
|
||||
if data.access and profile.access_tag_blacklist[data.access] then
|
||||
return false
|
||||
end
|
||||
|
||||
-- other tags
|
||||
data.junction = way:get_value_by_key("junction")
|
||||
data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0
|
||||
data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0
|
||||
data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0
|
||||
data.barrier = way:get_value_by_key("barrier")
|
||||
data.oneway = way:get_value_by_key("oneway")
|
||||
data.oneway_bicycle = way:get_value_by_key("oneway:bicycle")
|
||||
data.cycleway = way:get_value_by_key("cycleway")
|
||||
data.cycleway_left = way:get_value_by_key("cycleway:left")
|
||||
data.cycleway_right = way:get_value_by_key("cycleway:right")
|
||||
data.duration = way:get_value_by_key("duration")
|
||||
data.service = way:get_value_by_key("service")
|
||||
data.foot = way:get_value_by_key("foot")
|
||||
data.foot_forward = way:get_value_by_key("foot:forward")
|
||||
data.foot_backward = way:get_value_by_key("foot:backward")
|
||||
data.bicycle = way:get_value_by_key("bicycle")
|
||||
|
||||
speed_handler(profile,way,result,data)
|
||||
|
||||
oneway_handler(profile,way,result,data)
|
||||
|
||||
cycleway_handler(profile,way,result,data)
|
||||
|
||||
bike_push_handler(profile,way,result,data)
|
||||
|
||||
|
||||
-- maxspeed
|
||||
limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward )
|
||||
|
||||
-- not routable if no speed assigned
|
||||
-- this avoids assertions in debug builds
|
||||
if result.forward_speed <= 0 and result.duration <= 0 then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
if result.backward_speed <= 0 and result.duration <= 0 then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
safety_handler(profile,way,result,data)
|
||||
end
|
||||
|
||||
|
||||
|
||||
function speed_handler(profile,way,result,data)
|
||||
|
||||
data.way_type_allows_pushing = false
|
||||
|
||||
-- speed
|
||||
local bridge_speed = profile.bridge_speeds[data.bridge]
|
||||
if (bridge_speed and bridge_speed > 0) then
|
||||
data.highway = data.bridge
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( parseDuration(data.duration), 1 )
|
||||
end
|
||||
result.forward_speed = bridge_speed
|
||||
result.backward_speed = bridge_speed
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.route_speeds[data.route] then
|
||||
-- ferries (doesn't cover routes tagged using relations)
|
||||
result.forward_mode = mode.ferry
|
||||
result.backward_mode = mode.ferry
|
||||
if data.duration and durationIsValid(data.duration) then
|
||||
result.duration = math.max( 1, parseDuration(data.duration) )
|
||||
else
|
||||
result.forward_speed = profile.route_speeds[data.route]
|
||||
result.backward_speed = profile.route_speeds[data.route]
|
||||
end
|
||||
-- railway platforms (old tagging scheme)
|
||||
elseif data.railway and profile.platform_speeds[data.railway] then
|
||||
result.forward_speed = profile.platform_speeds[data.railway]
|
||||
result.backward_speed = profile.platform_speeds[data.railway]
|
||||
data.way_type_allows_pushing = true
|
||||
-- public_transport platforms (new tagging scheme)
|
||||
elseif data.public_transport and profile.platform_speeds[data.public_transport] then
|
||||
result.forward_speed = profile.platform_speeds[data.public_transport]
|
||||
result.backward_speed = profile.platform_speeds[data.public_transport]
|
||||
data.way_type_allows_pushing = true
|
||||
-- railways
|
||||
elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then
|
||||
result.forward_mode = mode.train
|
||||
result.backward_mode = mode.train
|
||||
result.forward_speed = profile.railway_speeds[data.railway]
|
||||
result.backward_speed = profile.railway_speeds[data.railway]
|
||||
elseif data.amenity and profile.amenity_speeds[data.amenity] then
|
||||
-- parking areas
|
||||
result.forward_speed = profile.amenity_speeds[data.amenity]
|
||||
result.backward_speed = profile.amenity_speeds[data.amenity]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif profile.bicycle_speeds[data.highway] then
|
||||
-- regular ways
|
||||
result.forward_speed = profile.bicycle_speeds[data.highway]
|
||||
result.backward_speed = profile.bicycle_speeds[data.highway]
|
||||
data.way_type_allows_pushing = true
|
||||
elseif data.access and profile.access_tag_whitelist[data.access] then
|
||||
-- unknown way, but valid access tag
|
||||
result.forward_speed = profile.default_speed
|
||||
result.backward_speed = profile.default_speed
|
||||
data.way_type_allows_pushing = true
|
||||
end
|
||||
end
|
||||
|
||||
function oneway_handler(profile,way,result,data)
|
||||
-- oneway
|
||||
data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway"
|
||||
data.reverse = false
|
||||
|
||||
if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway_bicycle == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then
|
||||
-- prevent other cases
|
||||
elseif data.oneway == "-1" then
|
||||
result.forward_mode = mode.inaccessible
|
||||
data.reverse = true
|
||||
elseif data.implied_oneway then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
function cycleway_handler(profile,way,result,data)
|
||||
-- cycleway
|
||||
data.has_cycleway_forward = false
|
||||
data.has_cycleway_backward = false
|
||||
data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway
|
||||
|
||||
-- cycleways on normal roads
|
||||
if data.is_twoway then
|
||||
if data.cycleway and profile.cycleway_tags[data.cycleway] then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
else
|
||||
local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left])
|
||||
local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right])
|
||||
local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left])
|
||||
|
||||
-- set cycleway even though it is a one-way if the opposite direction is tagged
|
||||
if has_twoway_cycleway then
|
||||
data.has_cycleway_backward = true
|
||||
data.has_cycleway_forward = true
|
||||
elseif has_opposite_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_backward = true
|
||||
else
|
||||
data.has_cycleway_forward = true
|
||||
end
|
||||
elseif has_oneway_cycleway then
|
||||
if not data.reverse then
|
||||
data.has_cycleway_forward = true
|
||||
else
|
||||
data.has_cycleway_backward = true
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
if data.has_cycleway_backward then
|
||||
result.backward_mode = mode.cycling
|
||||
result.backward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
|
||||
if data.has_cycleway_forward then
|
||||
result.forward_mode = mode.cycling
|
||||
result.forward_speed = profile.bicycle_speeds["cycleway"]
|
||||
end
|
||||
end
|
||||
|
||||
function bike_push_handler(profile,way,result,data)
|
||||
-- pushing bikes - if no other mode found
|
||||
if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or
|
||||
result.forward_speed == -1 or result.backward_speed == -1 then
|
||||
if data.foot ~= 'no' then
|
||||
local push_forward_speed = nil
|
||||
local push_backward_speed = nil
|
||||
|
||||
if profile.pedestrian_speeds[data.highway] then
|
||||
push_forward_speed = profile.pedestrian_speeds[data.highway]
|
||||
push_backward_speed = profile.pedestrian_speeds[data.highway]
|
||||
elseif data.man_made and profile.man_made_speeds[data.man_made] then
|
||||
push_forward_speed = profile.man_made_speeds[data.man_made]
|
||||
push_backward_speed = profile.man_made_speeds[data.man_made]
|
||||
else
|
||||
if data.foot == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
elseif data.foot_forward == 'yes' then
|
||||
push_forward_speed = profile.walking_speed
|
||||
elseif data.foot_backward == 'yes' then
|
||||
push_backward_speed = profile.walking_speed
|
||||
elseif data.way_type_allows_pushing then
|
||||
push_forward_speed = profile.walking_speed
|
||||
if not data.implied_oneway then
|
||||
push_backward_speed = profile.walking_speed
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.forward_speed = push_forward_speed
|
||||
end
|
||||
if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1)then
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.backward_speed = push_backward_speed
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
-- dismount
|
||||
if data.bicycle == "dismount" then
|
||||
result.forward_mode = mode.pushing_bike
|
||||
result.backward_mode = mode.pushing_bike
|
||||
result.forward_speed = profile.walking_speed
|
||||
result.backward_speed = profile.walking_speed
|
||||
end
|
||||
end
|
||||
|
||||
function safety_handler(profile,way,result,data)
|
||||
-- convert duration into cyclability
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
local safety_penalty = profile.unsafe_highway_list[data.highway] or 1.
|
||||
local is_unsafe = safety_penalty < 1
|
||||
|
||||
-- primaries that are one-way are probably large primaries where the lanes need to be separated
|
||||
if is_unsafe and data.highway == 'primary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.5
|
||||
end
|
||||
if is_unsafe and data.highway == 'secondary' and not data.is_twoway then
|
||||
safety_penalty = safety_penalty * 0.6
|
||||
end
|
||||
|
||||
local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward
|
||||
local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward
|
||||
local is_undesireable = data.highway == "service" and profile.service_penalties[data.service]
|
||||
local forward_penalty = 1.
|
||||
local backward_penalty = 1.
|
||||
if forward_is_unsafe then
|
||||
forward_penalty = math.min(forward_penalty, safety_penalty)
|
||||
end
|
||||
if backward_is_unsafe then
|
||||
backward_penalty = math.min(backward_penalty, safety_penalty)
|
||||
end
|
||||
|
||||
if is_undesireable then
|
||||
forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service])
|
||||
backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service])
|
||||
end
|
||||
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * forward_penalty
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * backward_penalty
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / forward_penalty
|
||||
end
|
||||
|
||||
if data.highway == "bicycle" then
|
||||
local safety_bonus = 1. + 0.2 -- base factor of 1.0 assumed so the bonus has a defined starting value
|
||||
if result.forward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.forward_rate = result.forward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
-- convert from km/h to m/s
|
||||
result.backward_rate = result.backward_speed / 3.6 * safety_bonus
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / safety_bonus
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
function process_way(profile, way, result)
|
||||
-- the initial filtering of ways based on presence of tags
|
||||
-- affects processing times significantly, because all ways
|
||||
-- have to be checked.
|
||||
-- to increase performance, prefetching and initial tag check
|
||||
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbid access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
|
||||
route = nil,
|
||||
man_made = nil,
|
||||
railway = nil,
|
||||
amenity = nil,
|
||||
public_transport = nil,
|
||||
bridge = nil,
|
||||
|
||||
access = nil,
|
||||
|
||||
junction = nil,
|
||||
maxspeed = nil,
|
||||
maxspeed_forward = nil,
|
||||
maxspeed_backward = nil,
|
||||
barrier = nil,
|
||||
oneway = nil,
|
||||
oneway_bicycle = nil,
|
||||
cycleway = nil,
|
||||
cycleway_left = nil,
|
||||
cycleway_right = nil,
|
||||
duration = nil,
|
||||
service = nil,
|
||||
foot = nil,
|
||||
foot_forward = nil,
|
||||
foot_backward = nil,
|
||||
bicycle = nil,
|
||||
|
||||
way_type_allows_pushing = false,
|
||||
has_cycleway_forward = false,
|
||||
has_cycleway_backward = false,
|
||||
is_twoway = true,
|
||||
reverse = false,
|
||||
implied_oneway = false
|
||||
}
|
||||
|
||||
local handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
|
||||
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
|
||||
-- our main handler
|
||||
handle_bicycle_tags,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.surface,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle allowed start/end modes
|
||||
WayHandlers.startpoint,
|
||||
|
||||
-- handle roundabouts
|
||||
WayHandlers.roundabouts,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set classes
|
||||
WayHandlers.classes,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers)
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- compute turn penalty as angle^2, with a left/right bias
|
||||
local normalized_angle = turn.angle / 90.0
|
||||
if normalized_angle >= 0.0 then
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias
|
||||
else
|
||||
turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = turn.duration + profile.properties.traffic_light_penalty
|
||||
end
|
||||
if profile.properties.weight_name == 'cyclability' then
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then
|
||||
turn.weight = turn.weight + profile.properties.mode_change_penalty
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
15
admiral-router/vehicles/lib/access.lua
Normal file
@ -0,0 +1,15 @@
|
||||
local ipairs = ipairs
|
||||
|
||||
local Access = {}
|
||||
|
||||
function Access.find_access_tag(source,access_tags_hierarchy)
|
||||
for i,v in ipairs(access_tags_hierarchy) do
|
||||
local tag = source:get_value_by_key(v)
|
||||
if tag then
|
||||
return tag
|
||||
end
|
||||
end
|
||||
return nil
|
||||
end
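A usage sketch of find_access_tag with a stubbed node object; the tags and the require path (lib/ on package.path) are assumptions made only for illustration.

```lua
-- Standalone usage sketch of Access.find_access_tag.
local Access = require('lib/access')

local node = { tags = { vehicle = 'no', access = 'yes' } }
function node:get_value_by_key(k) return self.tags[k] end

-- the first non-nil tag in the hierarchy wins, so the more specific
-- vehicle=no overrides the general access=yes here:
print(Access.find_access_tag(node, { 'bicycle', 'vehicle', 'access' }))  --> no
```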
|
||||
|
||||
return Access
|
29
admiral-router/vehicles/lib/destination.lua
Normal file
@ -0,0 +1,29 @@
|
||||
local Destination = {}
|
||||
|
||||
function Destination.get_directional_tag(way, is_forward, tag)
|
||||
local v
|
||||
if is_forward then
|
||||
v = way:get_value_by_key(tag .. ':forward') or way:get_value_by_key(tag)
|
||||
else
|
||||
v = way:get_value_by_key(tag .. ':backward') or way:get_value_by_key(tag)
|
||||
end
|
||||
if v then
|
||||
return v.gsub(v, ';', ', ')
|
||||
end
|
||||
end
|
||||
|
||||
-- Assemble destination as: "A59: Düsseldorf, Köln"
|
||||
-- destination:ref ^ ^ destination
|
||||
|
||||
function Destination.get_destination(way, is_forward)
|
||||
ref = Destination.get_directional_tag(way, is_forward, 'destination:ref')
|
||||
dest = Destination.get_directional_tag(way, is_forward, 'destination')
|
||||
street = Destination.get_directional_tag(way, is_forward, 'destination:street')
|
||||
if ref and dest then
|
||||
return ref .. ': ' .. dest
|
||||
else
|
||||
return ref or dest or street or ''
|
||||
end
|
||||
end
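A usage sketch of get_destination with a stubbed way object; the tag values are illustrative and the require path assumes lib/ is on package.path.

```lua
-- Standalone usage sketch of Destination.get_destination.
local Destination = require('lib/destination')

local way = {
  tags = {
    ['destination:ref'] = 'A59',
    ['destination']     = 'Düsseldorf;Köln',
  }
}
function way:get_value_by_key(k) return self.tags[k] end

-- semicolons are rewritten to ", " and ref/destination are joined:
print(Destination.get_destination(way, true))  --> A59: Düsseldorf, Köln
```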
|
||||
|
||||
return Destination
|
173
admiral-router/vehicles/lib/guidance.lua
Normal file
@ -0,0 +1,173 @@
|
||||
local Tags = require('lib/tags')
|
||||
local Set = require('lib/set')
|
||||
|
||||
local Guidance = {}
|
||||
|
||||
-- Guidance: Default Mapping from roads to types/priorities
|
||||
highway_classes = {
|
||||
motorway = road_priority_class.motorway,
|
||||
motorway_link = road_priority_class.motorway_link,
|
||||
trunk = road_priority_class.trunk,
|
||||
trunk_link = road_priority_class.trunk_link,
|
||||
primary = road_priority_class.primary,
|
||||
primary_link = road_priority_class.primary_link,
|
||||
secondary = road_priority_class.secondary,
|
||||
secondary_link = road_priority_class.secondary_link,
|
||||
tertiary = road_priority_class.tertiary,
|
||||
tertiary_link = road_priority_class.tertiary_link,
|
||||
unclassified = road_priority_class.unclassified,
|
||||
residential = road_priority_class.main_residential,
|
||||
service = road_priority_class.alley,
|
||||
living_street = road_priority_class.side_residential,
|
||||
track = road_priority_class.bike_path,
|
||||
path = road_priority_class.bike_path,
|
||||
footway = road_priority_class.foot_path,
|
||||
pedestrian = road_priority_class.foot_path,
|
||||
steps = road_priority_class.foot_path
|
||||
}
|
||||
|
||||
default_highway_class = road_priority_class.connectivity;
|
||||
|
||||
motorway_types = Set {
|
||||
'motorway',
|
||||
'motorway_link',
|
||||
'trunk',
|
||||
'trunk_link'
|
||||
}
|
||||
|
||||
-- these road types are set with a car in mind. For bicycle/walk we probably need different ones
|
||||
road_types = Set {
|
||||
'motorway',
|
||||
'motorway_link',
|
||||
'trunk',
|
||||
'trunk_link',
|
||||
'primary',
|
||||
'primary_link',
|
||||
'secondary',
|
||||
'secondary_link',
|
||||
'tertiary',
|
||||
'tertiary_link',
|
||||
'unclassified',
|
||||
'residential',
|
||||
'living_street'
|
||||
}
|
||||
|
||||
link_types = Set {
|
||||
'motorway_link',
|
||||
'trunk_link',
|
||||
'primary_link',
|
||||
'secondary_link',
|
||||
'tertiary_link'
|
||||
}
|
||||
|
||||
-- roads like parking lots are very unimportant for normal driving
|
||||
parking_class = Set{
|
||||
'parking_aisle',
|
||||
'driveway',
|
||||
'drive-through',
|
||||
'emergency_access'
|
||||
}
|
||||
|
||||
function Guidance.set_classification (highway, result, input_way)
|
||||
if motorway_types[highway] then
|
||||
result.road_classification.motorway_class = true
|
||||
end
|
||||
if link_types[highway] then
|
||||
result.road_classification.link_class = true
|
||||
end
|
||||
|
||||
-- All service roads are recognised as alley
|
||||
if highway ~= nil and highway == 'service' then
|
||||
local service_type = input_way:get_value_by_key('service');
|
||||
if service_type ~= nil and parking_class[service_type] then
|
||||
result.road_classification.road_priority_class = road_priority_class.alley
|
||||
else
|
||||
if service_type ~= nil and service_type == 'alley' then
|
||||
result.road_classification.road_priority_class = road_priority_class.alley
|
||||
else
|
||||
if service_type == nil then
|
||||
result.road_classification.road_priority_class = road_priority_class.alley
|
||||
else
|
||||
result.road_classification.road_priority_class = highway_classes[highway]
|
||||
end
|
||||
end
|
||||
end
|
||||
else
|
||||
if highway_classes[highway] ~= nil then
|
||||
result.road_classification.road_priority_class = highway_classes[highway]
|
||||
else
|
||||
result.road_classification.road_priority_class = default_highway_class
|
||||
end
|
||||
end
|
||||
if road_types[highway] then
|
||||
result.road_classification.may_be_ignored = false;
|
||||
else
|
||||
result.road_classification.may_be_ignored = true;
|
||||
end
|
||||
|
||||
local lane_count = input_way:get_value_by_key("lanes")
|
||||
if lane_count then
|
||||
local lc = tonumber(lane_count)
|
||||
if lc ~= nil then
|
||||
result.road_classification.num_lanes = lc
|
||||
end
|
||||
else
|
||||
local total_count = 0
|
||||
local forward_count = input_way:get_value_by_key("lanes:forward")
|
||||
if forward_count then
|
||||
local fc = tonumber(forward_count)
|
||||
if fc ~= nil then
|
||||
total_count = fc
|
||||
end
|
||||
end
|
||||
local backward_count = input_way:get_value_by_key("lanes:backward")
|
||||
if backward_count then
|
||||
local bc = tonumber(backward_count)
|
||||
if bc ~= nil then
|
||||
total_count = total_count + bc
|
||||
end
|
||||
end
|
||||
if total_count ~= 0 then
|
||||
result.road_classification.num_lanes = total_count
|
||||
end
|
||||
end
|
||||
end
|
||||
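-- A hypothetical usage sketch (not called by the profiles; the way/result tables
-- here stand in for the objects normally provided by C++):
--   local result = { road_classification = {} }
--   Guidance.set_classification('residential', result, way)  -- way tagged lanes=2
--   -- result.road_classification.road_priority_class == road_priority_class.main_residential
--   -- result.road_classification.may_be_ignored == false
--   -- result.road_classification.num_lanes == 2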
|
||||
-- returns forward,backward psv lane count
|
||||
local function get_psv_counts(way,data)
|
||||
local psv_forward, psv_backward = Tags.get_forward_backward_by_key(way,data,'lanes:psv')
|
||||
if psv_forward then
|
||||
psv_forward = tonumber(psv_forward)
|
||||
end
|
||||
if psv_backward then
|
||||
psv_backward = tonumber(psv_backward)
|
||||
end
|
||||
return psv_forward or 0,
|
||||
psv_backward or 0
|
||||
end
|
||||
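-- Illustrative example (assuming a two-way road): with lanes:psv:forward = "1"
-- and no lanes:psv or lanes:psv:backward tags, get_psv_counts(way, data)
-- returns 1, 0.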
|
||||
-- trims lane string with regard to supported lanes
|
||||
local function process_lanes(turn_lanes,vehicle_lanes,first_count,second_count)
|
||||
if turn_lanes then
|
||||
if vehicle_lanes then
|
||||
return applyAccessTokens(turn_lanes,vehicle_lanes)
|
||||
elseif first_count ~= 0 or second_count ~= 0 then
|
||||
return trimLaneString(turn_lanes, first_count, second_count)
|
||||
else
|
||||
return turn_lanes
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Note: this is broken for left-sided driving; it would need to swap left and right for left-hand traffic
|
||||
function Guidance.get_turn_lanes(way,data)
|
||||
local psv_fw, psv_bw = get_psv_counts(way,data)
|
||||
local turn_lanes_fw, turn_lanes_bw = Tags.get_forward_backward_by_key(way,data,'turn:lanes')
|
||||
local vehicle_lanes_fw, vehicle_lanes_bw = Tags.get_forward_backward_by_key(way,data,'vehicle:lanes')
|
||||
|
||||
--note: backward lanes swap psv_bw and psv_fw
|
||||
return process_lanes(turn_lanes_fw,vehicle_lanes_fw,psv_bw,psv_fw) or turn_lanes_fw,
|
||||
process_lanes(turn_lanes_bw,vehicle_lanes_bw,psv_fw,psv_bw) or turn_lanes_bw
|
||||
end
|
||||
|
||||
return Guidance
|
19
admiral-router/vehicles/lib/maxspeed.lua
Normal file
@ -0,0 +1,19 @@
|
||||
local math = math
|
||||
|
||||
local MaxSpeed = {}
|
||||
|
||||
function MaxSpeed.limit(way,max,maxf,maxb)
|
||||
if maxf and maxf>0 then
|
||||
way.forward_speed = math.min(way.forward_speed, maxf)
|
||||
elseif max and max>0 then
|
||||
way.forward_speed = math.min(way.forward_speed, max)
|
||||
end
|
||||
|
||||
if maxb and maxb>0 then
|
||||
way.backward_speed = math.min(way.backward_speed, maxb)
|
||||
elseif max and max>0 then
|
||||
way.backward_speed = math.min(way.backward_speed, max)
|
||||
end
|
||||
end
|
||||
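-- A minimal usage sketch (hypothetical `way` table, illustrative only):
--   local way = { forward_speed = 90, backward_speed = 90 }
--   MaxSpeed.limit(way, 50, 60, nil)
--   -- way.forward_speed  --> 60  (capped by maxf)
--   -- way.backward_speed --> 50  (no maxb, so capped by max)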
|
||||
return MaxSpeed
|
107
admiral-router/vehicles/lib/measure.lua
Normal file
@ -0,0 +1,107 @@
|
||||
local Sequence = require('lib/sequence')
|
||||
|
||||
Measure = {}
|
||||
|
||||
-- measurements conversion constants
|
||||
local inch_to_meters = 0.0254
|
||||
local feet_to_inches = 12
|
||||
local pound_to_kilograms = 0.45359237
|
||||
local miles_to_kilometers = 1.609
|
||||
|
||||
-- Parse a speed value as kilometers per hour.
|
||||
function Measure.parse_value_speed(source)
|
||||
local n = tonumber(source:match("%d*"))
|
||||
if n then
|
||||
if string.match(source, "mph") or string.match(source, "mp/h") then
|
||||
n = n * miles_to_kilometers
|
||||
end
|
||||
return n
|
||||
end
|
||||
end
|
||||
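-- Illustrative examples:
--   Measure.parse_value_speed("60")      --> 60
--   Measure.parse_value_speed("30 mph")  --> 30 * 1.609 = 48.27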
|
||||
--- Parse string as a height in meters.
|
||||
--- according to http://wiki.openstreetmap.org/wiki/Key:maxheight
|
||||
function Measure.parse_value_meters(value)
|
||||
local n = tonumber(value:gsub(",", "."):match("%d+%.?%d*"))
|
||||
if n then
|
||||
local inches = value:match("'.*")
|
||||
if inches then -- Imperial unit to metric
|
||||
-- try to parse feet/inches
|
||||
n = n * feet_to_inches
|
||||
local m = tonumber(inches:match("%d+"))
|
||||
if m then
|
||||
n = n + m
|
||||
end
|
||||
n = n * inch_to_meters
|
||||
end
|
||||
return n
|
||||
end
|
||||
end
|
||||
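-- Illustrative examples:
--   Measure.parse_value_meters("2.5")    --> 2.5
--   Measure.parse_value_meters("3,2")    --> 3.2  (comma accepted as decimal point)
--   Measure.parse_value_meters("6'7\"")  --> (6*12 + 7) * 0.0254 = 2.0066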
|
||||
--- Parse weight value in kilograms.
|
||||
--- according to https://wiki.openstreetmap.org/wiki/Key:maxweight
|
||||
function Measure.parse_value_kilograms(value)
|
||||
local n = tonumber(value:gsub(",", "."):match("%d+%.?%d*"))
|
||||
if n then
|
||||
if string.match(value, "lbs") then
|
||||
n = n * pound_to_kilograms
|
||||
elseif string.match(value, "kg") then
|
||||
-- n = n
|
||||
else -- Default, metric tons
|
||||
n = n * 1000
|
||||
end
|
||||
return n
|
||||
end
|
||||
end
|
||||
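-- Illustrative examples:
--   Measure.parse_value_kilograms("3.5")       --> 3500  (interpreted as metric tons)
--   Measure.parse_value_kilograms("7500 kg")   --> 7500
--   Measure.parse_value_kilograms("2000 lbs")  --> 2000 * 0.45359237 = 907.18474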
|
||||
--- Get maxspeed of the specified way in kilometers per hour.
|
||||
function Measure.get_max_speed(raw_value)
|
||||
if raw_value then
|
||||
return Measure.parse_value_speed(raw_value)
|
||||
end
|
||||
end
|
||||
|
||||
-- default maxheight value defined in https://wiki.openstreetmap.org/wiki/Key:maxheight#Non-numerical_values
|
||||
local default_maxheight = 4.5
|
||||
-- Non-numerical values that map to the 4.5 m default; below_default and no_indications are not considered
|
||||
local height_non_numerical_values = Set { "default", "none", "no-sign", "unsigned" }
|
||||
|
||||
--- Get maxheight of the specified way in meters. If there is no
|
||||
--- max height, return nil.
|
||||
function Measure.get_max_height(raw_value, element)
|
||||
if raw_value then
|
||||
if height_non_numerical_values[raw_value] then
|
||||
if element then
|
||||
return tonumber(element:get_location_tag('maxheight')) or default_maxheight
|
||||
else
|
||||
return default_maxheight
|
||||
end
|
||||
else
|
||||
return Measure.parse_value_meters(raw_value)
|
||||
end
|
||||
end
|
||||
end
|
||||
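-- Illustrative examples (element omitted, which is allowed when no location
-- data is available):
--   Measure.get_max_height("3.1", nil)   --> 3.1
--   Measure.get_max_height("none", nil)  --> 4.5  (default_maxheight)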
|
||||
--- Get maxwidth of specified way in meters.
|
||||
function Measure.get_max_width(raw_value)
|
||||
if raw_value then
|
||||
return Measure.parse_value_meters(raw_value)
|
||||
end
|
||||
end
|
||||
|
||||
--- Get maxlength of specified way in meters.
|
||||
function Measure.get_max_length(raw_value)
|
||||
if raw_value then
|
||||
return Measure.parse_value_meters(raw_value)
|
||||
end
|
||||
end
|
||||
|
||||
--- Get maxweight of the specified way in kilograms.
|
||||
function Measure.get_max_weight(raw_value)
|
||||
if raw_value then
|
||||
return Measure.parse_value_kilograms(raw_value)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
return Measure;
|
457
admiral-router/vehicles/lib/pprint.lua
Normal file
@ -0,0 +1,457 @@
|
||||
-- Easy way to print data structures
|
||||
-- From https://github.com/jagt/pprint.lua, file is licensed as public domain
|
||||
|
||||
local pprint = { VERSION = '0.1' }
|
||||
|
||||
pprint.defaults = {
|
||||
-- type display trigger, hide not useful datatypes by default
|
||||
-- custom types are treated as table
|
||||
show_nil = true,
|
||||
show_boolean = true,
|
||||
show_number = true,
|
||||
show_string = true,
|
||||
show_table = true,
|
||||
show_function = false,
|
||||
show_thread = false,
|
||||
show_userdata = false,
|
||||
-- additional display trigger
|
||||
show_metatable = false, -- show metatable
|
||||
show_all = false, -- override other show settings and show everything
|
||||
use_tostring = false, -- use __tostring to print table if available
|
||||
filter_function = nil, -- called like callback(value[,key, parent]), return truthy value to hide
|
||||
object_cache = 'local', -- cache blob and table to give it an id, 'local' cache per print, 'global' cache
|
||||
-- per process, falsy value to disable (might cause infinite loop)
|
||||
-- format settings
|
||||
indent_size = 2, -- indent for each nested table level
|
||||
level_width = 80, -- max width per indent level
|
||||
wrap_string = true, -- wrap string when it's longer than level_width
|
||||
wrap_array = false, -- wrap every array elements
|
||||
sort_keys = true, -- sort table keys
|
||||
}
|
||||
|
||||
local TYPES = {
|
||||
['nil'] = 1, ['boolean'] = 2, ['number'] = 3, ['string'] = 4,
|
||||
['table'] = 5, ['function'] = 6, ['thread'] = 7, ['userdata'] = 8
|
||||
}
|
||||
|
||||
-- seems this is the only way to escape these, as Lua doesn't know how to map char '\a' to 'a'
|
||||
local ESCAPE_MAP = {
|
||||
['\a'] = '\\a', ['\b'] = '\\b', ['\f'] = '\\f', ['\n'] = '\\n', ['\r'] = '\\r',
|
||||
['\t'] = '\\t', ['\v'] = '\\v', ['\\'] = '\\\\',
|
||||
}
|
||||
|
||||
-- generic utilities
|
||||
local function escape(s)
|
||||
s = s:gsub('([%c\\])', ESCAPE_MAP)
|
||||
local dq = s:find('"')
|
||||
local sq = s:find("'")
|
||||
if dq and sq then
|
||||
return s:gsub('"', '\\"'), '"'
|
||||
elseif sq then
|
||||
return s, '"'
|
||||
else
|
||||
return s, "'"
|
||||
end
|
||||
end
|
||||
|
||||
local function is_plain_key(key)
|
||||
return type(key) == 'string' and key:match('^[%a_][%a%d_]*$')
|
||||
end
|
||||
|
||||
local CACHE_TYPES = {
|
||||
['table'] = true, ['function'] = true, ['thread'] = true, ['userdata'] = true
|
||||
}
|
||||
|
||||
-- cache would be populated to be like:
|
||||
-- {
|
||||
-- function = { `fun1` = 1, _cnt = 1 }, -- object id
|
||||
-- table = { `table1` = 1, `table2` = 2, _cnt = 2 },
|
||||
-- visited_tables = { `table1` = 7, `table2` = 8 }, -- visit count
|
||||
-- }
|
||||
-- use weak refs to avoid accidentally adding refcounts
|
||||
local function cache_apperance(obj, cache, option)
|
||||
if not cache.visited_tables then
|
||||
cache.visited_tables = setmetatable({}, {__mode = 'k'})
|
||||
end
|
||||
local t = type(obj)
|
||||
|
||||
-- TODO can't test filter_function here as we don't have the ix and key,
|
||||
-- might cause different results?
|
||||
-- respect show_xxx and filter_function to be consistent with print results
|
||||
if (not TYPES[t] and not option.show_table)
|
||||
or (TYPES[t] and not option['show_'..t]) then
|
||||
return
|
||||
end
|
||||
|
||||
if CACHE_TYPES[t] or TYPES[t] == nil then
|
||||
if not cache[t] then
|
||||
cache[t] = setmetatable({}, {__mode = 'k'})
|
||||
cache[t]._cnt = 0
|
||||
end
|
||||
if not cache[t][obj] then
|
||||
cache[t]._cnt = cache[t]._cnt + 1
|
||||
cache[t][obj] = cache[t]._cnt
|
||||
end
|
||||
end
|
||||
if t == 'table' or TYPES[t] == nil then
|
||||
if cache.visited_tables[obj] == false then
|
||||
-- already printed, no need to mark this and its children anymore
|
||||
return
|
||||
elseif cache.visited_tables[obj] == nil then
|
||||
cache.visited_tables[obj] = 1
|
||||
else
|
||||
-- visited already, increment and continue
|
||||
cache.visited_tables[obj] = cache.visited_tables[obj] + 1
|
||||
return
|
||||
end
|
||||
for k, v in pairs(obj) do
|
||||
cache_apperance(k, cache, option)
|
||||
cache_apperance(v, cache, option)
|
||||
end
|
||||
local mt = getmetatable(obj)
|
||||
if mt and option.show_metatable then
|
||||
cache_apperance(mt, cache, option)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- makes 'foo2' < 'foo100000'. string.sub makes substring anyway, no need to use index based method
|
||||
local function str_natural_cmp(lhs, rhs)
|
||||
while #lhs > 0 and #rhs > 0 do
|
||||
local lmid, lend = lhs:find('%d+')
|
||||
local rmid, rend = rhs:find('%d+')
|
||||
if not (lmid and rmid) then return lhs < rhs end
|
||||
|
||||
local lsub = lhs:sub(1, lmid-1)
|
||||
local rsub = rhs:sub(1, rmid-1)
|
||||
if lsub ~= rsub then
|
||||
return lsub < rsub
|
||||
end
|
||||
|
||||
local lnum = tonumber(lhs:sub(lmid, lend))
|
||||
local rnum = tonumber(rhs:sub(rmid, rend))
|
||||
if lnum ~= rnum then
|
||||
return lnum < rnum
|
||||
end
|
||||
|
||||
lhs = lhs:sub(lend+1)
|
||||
rhs = rhs:sub(rend+1)
|
||||
end
|
||||
return lhs < rhs
|
||||
end
|
||||
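-- Illustrative behaviour of the local natural-ordering helper above:
--   str_natural_cmp('foo2', 'foo10')  --> true   ('foo2' sorts before 'foo10')
--   str_natural_cmp('foo10', 'foo2')  --> false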
|
||||
local function cmp(lhs, rhs)
|
||||
local tleft = type(lhs)
|
||||
local tright = type(rhs)
|
||||
if tleft == 'number' and tright == 'number' then return lhs < rhs end
|
||||
if tleft == 'string' and tright == 'string' then return str_natural_cmp(lhs, rhs) end
|
||||
if tleft == tright then return str_natural_cmp(tostring(lhs), tostring(rhs)) end
|
||||
|
||||
-- allow custom types
|
||||
local oleft = TYPES[tleft] or 9
|
||||
local oright = TYPES[tright] or 9
|
||||
return oleft < oright
|
||||
end
|
||||
|
||||
-- setup option with default
|
||||
local function make_option(option)
|
||||
if option == nil then
|
||||
option = {}
|
||||
end
|
||||
for k, v in pairs(pprint.defaults) do
|
||||
if option[k] == nil then
|
||||
option[k] = v
|
||||
end
|
||||
if option.show_all then
|
||||
for t, _ in pairs(TYPES) do
|
||||
option['show_'..t] = true
|
||||
end
|
||||
option.show_metatable = true
|
||||
end
|
||||
end
|
||||
return option
|
||||
end
|
||||
|
||||
-- override defaults and take effects for all following calls
|
||||
function pprint.setup(option)
|
||||
pprint.defaults = make_option(option)
|
||||
end
|
||||
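-- A hypothetical configuration example (option names taken from pprint.defaults above):
--   pprint.setup{ show_function = true, wrap_array = true }
--   -- all later pprint()/pprint.pformat() calls will now show functions and
--   -- wrap each array element onto its own line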
|
||||
-- format lua object into a string
|
||||
function pprint.pformat(obj, option, printer)
|
||||
option = make_option(option)
|
||||
local buf = {}
|
||||
local function default_printer(s)
|
||||
table.insert(buf, s)
|
||||
end
|
||||
printer = printer or default_printer
|
||||
|
||||
local cache
|
||||
if option.object_cache == 'global' then
|
||||
-- steal the cache into a local var so it's not visible from _G or anywhere
|
||||
-- still can't avoid the user explicitly referencing pprint._cache, but it shouldn't happen anyway
|
||||
cache = pprint._cache or {}
|
||||
pprint._cache = nil
|
||||
elseif option.object_cache == 'local' then
|
||||
cache = {}
|
||||
end
|
||||
|
||||
local last = '' -- used for look back and remove trailing comma
|
||||
local status = {
|
||||
indent = '', -- current indent
|
||||
len = 0, -- current line length
|
||||
}
|
||||
|
||||
local wrapped_printer = function(s)
|
||||
printer(last)
|
||||
last = s
|
||||
end
|
||||
|
||||
local function _indent(d)
|
||||
status.indent = string.rep(' ', d + #(status.indent))
|
||||
end
|
||||
|
||||
local function _n(d)
|
||||
wrapped_printer('\n')
|
||||
wrapped_printer(status.indent)
|
||||
if d then
|
||||
_indent(d)
|
||||
end
|
||||
status.len = 0
|
||||
return true -- used to close bracket correctly
|
||||
end
|
||||
|
||||
local function _p(s, nowrap)
|
||||
status.len = status.len + #s
|
||||
if not nowrap and status.len > option.level_width then
|
||||
_n()
|
||||
wrapped_printer(s)
|
||||
status.len = #s
|
||||
else
|
||||
wrapped_printer(s)
|
||||
end
|
||||
end
|
||||
|
||||
local formatter = {}
|
||||
local function format(v)
|
||||
local f = formatter[type(v)]
|
||||
f = f or formatter.table -- allow patched type()
|
||||
if option.filter_function and option.filter_function(v, nil, nil) then
|
||||
return ''
|
||||
else
|
||||
return f(v)
|
||||
end
|
||||
end
|
||||
|
||||
local function tostring_formatter(v)
|
||||
return tostring(v)
|
||||
end
|
||||
|
||||
local function number_formatter(n)
|
||||
return n == math.huge and '[[math.huge]]' or tostring(n)
|
||||
end
|
||||
|
||||
local function nop_formatter(v)
|
||||
return ''
|
||||
end
|
||||
|
||||
local function make_fixed_formatter(t, has_cache)
|
||||
if has_cache then
|
||||
return function (v)
|
||||
return string.format('[[%s %d]]', t, cache[t][v])
|
||||
end
|
||||
else
|
||||
return function (v)
|
||||
return '[['..t..']]'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local function string_formatter(s, force_long_quote)
|
||||
local s, quote = escape(s)
|
||||
local quote_len = force_long_quote and 4 or 2
|
||||
if quote_len + #s + status.len > option.level_width then
|
||||
_n()
|
||||
-- only wrap the string when it is longer than level_width
|
||||
if option.wrap_string and #s + quote_len > option.level_width then
|
||||
-- keep the quotes together
|
||||
_p('[[')
|
||||
while #s + status.len >= option.level_width do
|
||||
local seg = option.level_width - status.len
|
||||
_p(string.sub(s, 1, seg), true)
|
||||
_n()
|
||||
s = string.sub(s, seg+1)
|
||||
end
|
||||
_p(s) -- print the remaining parts
|
||||
return ']]'
|
||||
end
|
||||
end
|
||||
|
||||
return force_long_quote and '[['..s..']]' or quote..s..quote
|
||||
end
|
||||
|
||||
local function table_formatter(t)
|
||||
if option.use_tostring then
|
||||
local mt = getmetatable(t)
|
||||
if mt and mt.__tostring then
|
||||
return string_formatter(tostring(t), true)
|
||||
end
|
||||
end
|
||||
|
||||
local print_header_ix = nil
|
||||
local ttype = type(t)
|
||||
if option.object_cache then
|
||||
local cache_state = cache.visited_tables[t]
|
||||
local tix = cache[ttype][t]
|
||||
-- FIXME should really handle `cache_state == nil`
|
||||
-- as user might add things through filter_function
|
||||
if cache_state == false then
|
||||
-- already printed, just print the number
|
||||
return string_formatter(string.format('%s %d', ttype, tix), true)
|
||||
elseif cache_state > 1 then
|
||||
-- appeared more than once, print table header with number
|
||||
print_header_ix = tix
|
||||
cache.visited_tables[t] = false
|
||||
else
|
||||
-- appeared exactly once, print like a normal table
|
||||
end
|
||||
end
|
||||
|
||||
local tlen = #t
|
||||
local wrapped = false
|
||||
_p('{')
|
||||
_indent(option.indent_size)
|
||||
_p(string.rep(' ', option.indent_size - 1))
|
||||
if print_header_ix then
|
||||
_p(string.format('--[[%s %d]] ', ttype, print_header_ix))
|
||||
end
|
||||
for ix = 1,tlen do
|
||||
local v = t[ix]
|
||||
if formatter[type(v)] == nop_formatter or
|
||||
(option.filter_function and option.filter_function(v, ix, t)) then
|
||||
-- pass
|
||||
else
|
||||
if option.wrap_array then
|
||||
wrapped = _n()
|
||||
end
|
||||
_p(format(v)..', ')
|
||||
end
|
||||
end
|
||||
|
||||
-- hashmap part of the table, in contrast to array part
|
||||
local function is_hash_key(k)
|
||||
local numkey = tonumber(k)
|
||||
if numkey ~= k or numkey > tlen then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
local function print_kv(k, v, t)
|
||||
-- can't use option.show_x as obj may contain custom type
|
||||
if formatter[type(v)] == nop_formatter or
|
||||
formatter[type(k)] == nop_formatter or
|
||||
(option.filter_function and option.filter_function(v, k, t)) then
|
||||
return
|
||||
end
|
||||
wrapped = _n()
|
||||
if is_plain_key(k) then
|
||||
_p(k, true)
|
||||
else
|
||||
_p('[')
|
||||
-- [[]]-style strings are illegal in keys, so spaces need to be added in between
|
||||
local k = format(k)
|
||||
if string.match(k, '%[%[') then
|
||||
_p(' '..k..' ', true)
|
||||
else
|
||||
_p(k, true)
|
||||
end
|
||||
_p(']')
|
||||
end
|
||||
_p(' = ', true)
|
||||
_p(format(v), true)
|
||||
_p(',', true)
|
||||
end
|
||||
|
||||
if option.sort_keys then
|
||||
local keys = {}
|
||||
for k, _ in pairs(t) do
|
||||
if is_hash_key(k) then
|
||||
table.insert(keys, k)
|
||||
end
|
||||
end
|
||||
table.sort(keys, cmp)
|
||||
for _, k in ipairs(keys) do
|
||||
print_kv(k, t[k], t)
|
||||
end
|
||||
else
|
||||
for k, v in pairs(t) do
|
||||
if is_hash_key(k) then
|
||||
print_kv(k, v, t)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if option.show_metatable then
|
||||
local mt = getmetatable(t)
|
||||
if mt then
|
||||
print_kv('__metatable', mt, t)
|
||||
end
|
||||
end
|
||||
|
||||
_indent(-option.indent_size)
|
||||
-- make { } into {}
|
||||
last = string.gsub(last, '^ +$', '')
|
||||
-- peek last to remove trailing comma
|
||||
last = string.gsub(last, ',%s*$', ' ')
|
||||
if wrapped then
|
||||
_n()
|
||||
end
|
||||
_p('}')
|
||||
|
||||
return ''
|
||||
end
|
||||
|
||||
-- set formatters
|
||||
formatter['nil'] = option.show_nil and tostring_formatter or nop_formatter
|
||||
formatter['boolean'] = option.show_boolean and tostring_formatter or nop_formatter
|
||||
formatter['number'] = option.show_number and number_formatter or nop_formatter -- need to handle math.huge
|
||||
formatter['function'] = option.show_function and make_fixed_formatter('function', option.object_cache) or nop_formatter
|
||||
formatter['thread'] = option.show_thread and make_fixed_formatter('thread', option.object_cache) or nop_formatter
|
||||
formatter['userdata'] = option.show_userdata and make_fixed_formatter('userdata', option.object_cache) or nop_formatter
|
||||
formatter['string'] = option.show_string and string_formatter or nop_formatter
|
||||
formatter['table'] = option.show_table and table_formatter or nop_formatter
|
||||
|
||||
if option.object_cache then
|
||||
-- needs to visit the table before start printing
|
||||
cache_apperance(obj, cache, option)
|
||||
end
|
||||
|
||||
_p(format(obj))
|
||||
printer(last) -- close the buffered one
|
||||
|
||||
-- put cache back if global
|
||||
if option.object_cache == 'global' then
|
||||
pprint._cache = cache
|
||||
end
|
||||
|
||||
return table.concat(buf)
|
||||
end
|
||||
|
||||
-- pprint all the arguments
|
||||
function pprint.pprint( ... )
|
||||
local args = {...}
|
||||
-- select will get an accurate count of array len, counting trailing nils
|
||||
local len = select('#', ...)
|
||||
for ix = 1,len do
|
||||
pprint.pformat(args[ix], nil, io.write)
|
||||
io.write('\n')
|
||||
end
|
||||
end
|
||||
|
||||
setmetatable(pprint, {
|
||||
__call = function (_, ...)
|
||||
pprint.pprint(...)
|
||||
end
|
||||
})
|
||||
|
||||
return pprint
|
142
admiral-router/vehicles/lib/profile_debugger.lua
Normal file
@ -0,0 +1,142 @@
|
||||
-- Enable calling our lua profile code directly from the lua command line,
|
||||
-- which makes it easier to debug.
|
||||
-- We simulate the normal C++ environment by defining the required globals and functions.
|
||||
|
||||
-- See debug_example.lua for an example of how to require and use this file.
|
||||
|
||||
-- for more convenient printing of tables
|
||||
local pprint = require('lib/pprint')
|
||||
|
||||
|
||||
-- globals that are normally set from C++
|
||||
|
||||
-- should match values defined in include/extractor/road_classification.hpp
|
||||
road_priority_class = {
|
||||
motorway = 0,
|
||||
trunk = 2,
|
||||
primary = 4,
|
||||
secondary = 6,
|
||||
tertiary = 8,
|
||||
main_residential = 10,
|
||||
side_residential = 11,
|
||||
link_road = 14,
|
||||
bike_path = 16,
|
||||
foot_path = 18,
|
||||
connectivity = 31,
|
||||
}
|
||||
|
||||
-- should match values defined in include/extractor/travel_mode.hpp
|
||||
mode = {
|
||||
inaccessible = 0,
|
||||
driving = 1,
|
||||
cycling = 2,
|
||||
walking = 3,
|
||||
ferry = 4,
|
||||
train = 5,
|
||||
pushing_bike = 6,
|
||||
}
|
||||
|
||||
-- Mock C++ helper functions which are called from Lua.
|
||||
-- TODO
|
||||
-- Debugging Lua code that uses these will not work correctly
|
||||
-- unless we reimplement the methods in Lua.
|
||||
|
||||
function durationIsValid(str)
|
||||
return true
|
||||
end
|
||||
|
||||
function parseDuration(str)
|
||||
return 1
|
||||
end
|
||||
|
||||
function canonicalizeStringList(str)
|
||||
return str
|
||||
end
|
||||
|
||||
|
||||
|
||||
-- debug helper
|
||||
local Debug = {}
|
||||
|
||||
-- helpers for sorting associative array
|
||||
function Debug.get_keys_sorted_by_value(tbl, sortFunction)
|
||||
local keys = {}
|
||||
for key in pairs(tbl) do
|
||||
table.insert(keys, key)
|
||||
end
|
||||
|
||||
table.sort(keys, function(a, b)
|
||||
return sortFunction(tbl[a], tbl[b])
|
||||
end)
|
||||
|
||||
return keys
|
||||
end
|
||||
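-- Illustrative example:
--   Debug.get_keys_sorted_by_value({ a = 3, b = 1 }, function(x, y) return x > y end)
--   --> { 'a', 'b' }   (keys ordered by descending value)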
|
||||
-- helper for printing sorted array
|
||||
function Debug.print_sorted(sorted,associative)
|
||||
for _, key in ipairs(sorted) do
|
||||
print(associative[key], key)
|
||||
end
|
||||
end
|
||||
|
||||
function Debug.report_tag_fetches()
|
||||
print("Tag fetches:")
|
||||
sorted_counts = Debug.get_keys_sorted_by_value(Debug.tags.counts, function(a, b) return a > b end)
|
||||
Debug.print_sorted(sorted_counts, Debug.tags.counts)
|
||||
print(Debug.tags.total, 'total')
|
||||
end
|
||||
|
||||
function Debug.load_profile(profile)
|
||||
Debug.functions = require(profile)
|
||||
Debug.profile = Debug.functions.setup()
|
||||
end
|
||||
|
||||
function Debug.reset_tag_fetch_counts()
|
||||
Debug.tags = {
|
||||
total = 0,
|
||||
counts = {}
|
||||
}
|
||||
end
|
||||
|
||||
function Debug.register_tag_fetch(k)
|
||||
if Debug.tags.total then
|
||||
Debug.tags.total = Debug.tags.total + 1
|
||||
else
|
||||
Debug['tags']['total'] = 1
|
||||
end
|
||||
|
||||
if Debug['tags']['counts'][k] then
|
||||
Debug['tags']['counts'][k] = Debug['tags']['counts'][k] + 1
|
||||
else
|
||||
Debug['tags']['counts'][k] = 1
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
function Debug.process_way(way,result)
|
||||
|
||||
-- setup result table
|
||||
result.road_classification = {}
|
||||
result.forward_speed = -1
|
||||
result.backward_speed = -1
|
||||
result.duration = 0
|
||||
result.forward_classes = {}
|
||||
result.backward_classes = {}
|
||||
|
||||
-- intercept tag functions normally provided via C++
|
||||
function way:get_value_by_key(k)
|
||||
Debug.register_tag_fetch(k)
|
||||
return self[k]
|
||||
end
|
||||
function way:get_location_tag(k)
|
||||
return nil
|
||||
end
|
||||
|
||||
-- reset tag counts
|
||||
Debug.reset_tag_fetch_counts()
|
||||
|
||||
-- call the way processing function
|
||||
Debug.functions.process_way(Debug.profile,way,result)
|
||||
end
|
||||
|
||||
return Debug
|
261
admiral-router/vehicles/lib/relations.lua
Normal file
@ -0,0 +1,261 @@
|
||||
-- Profile functions dealing with various aspects of relation parsing
|
||||
--
|
||||
-- You can run a selection you find useful in your profile,
|
||||
-- or do your own processing if/when required.
|
||||
|
||||
Utils = require('lib/utils')
|
||||
|
||||
Relations = {}
|
||||
|
||||
function is_direction(role)
|
||||
return (role == 'north' or role == 'south' or role == 'west' or role == 'east')
|
||||
end
|
||||
|
||||
-- match ref values to relations data
|
||||
function Relations.match_to_ref(relations, ref)
|
||||
|
||||
function calculate_scores(refs, tag_value)
|
||||
local tag_tokens = Set(Utils.tokenize_common(tag_value))
|
||||
local result = {}
|
||||
for i, r in ipairs(refs) do
|
||||
local ref_tokens = Utils.tokenize_common(r)
|
||||
local score = 0
|
||||
|
||||
for _, t in ipairs(ref_tokens) do
|
||||
if tag_tokens[t] then
|
||||
if Utils.is_number(t) then
|
||||
score = score + 2
|
||||
else
|
||||
score = score + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
result[r] = score
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
local references = Utils.string_list_tokens(ref)
|
||||
local result_match = {}
|
||||
local order = {}
|
||||
for i, r in ipairs(references) do
|
||||
result_match[r] = { forward = nil, backward = nil }
|
||||
order[i] = r
|
||||
end
|
||||
|
||||
for i, rel in ipairs(relations) do
|
||||
local name_scores = nil
|
||||
local name_tokens = {}
|
||||
local route_name = rel["route_name"]
|
||||
if route_name then
|
||||
name_scores = calculate_scores(references, route_name)
|
||||
end
|
||||
|
||||
local ref_scores = nil
|
||||
local ref_tokens = {}
|
||||
local route_ref = rel["route_ref"]
|
||||
if route_ref then
|
||||
ref_scores = calculate_scores(references, route_ref)
|
||||
end
|
||||
|
||||
-- merge scores
|
||||
local direction = rel["route_direction"]
|
||||
if direction then
|
||||
local best_score = -1
|
||||
local best_ref = nil
|
||||
|
||||
function find_best(scores)
|
||||
if scores then
|
||||
for k ,v in pairs(scores) do
|
||||
if v > best_score then
|
||||
best_ref = k
|
||||
best_score = v
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
find_best(name_scores)
|
||||
find_best(ref_scores)
|
||||
|
||||
if best_ref then
|
||||
local result_direction = result_match[best_ref]
|
||||
|
||||
local is_forward = rel["route_forward"]
|
||||
if is_forward == nil then
|
||||
result_direction.forward = direction
|
||||
result_direction.backward = direction
|
||||
elseif is_forward == true then
|
||||
result_direction.forward = direction
|
||||
else
|
||||
result_direction.backward = direction
|
||||
end
|
||||
|
||||
result_match[best_ref] = result_direction
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
local result = {}
|
||||
for i, r in ipairs(order) do
|
||||
result[i] = { ref = r, dir = result_match[r] };
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
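-- A hypothetical example (relation data made up for illustration): for
-- ref = "E61;A1" and relations = { { route_ref = "A1", route_direction = "north" } },
-- the result keeps the original ref order and attaches directions only where a
-- relation matched:
--   { { ref = "E61", dir = { } },
--     { ref = "A1",  dir = { forward = "north", backward = "north" } } }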
|
||||
function get_direction_from_superrel(rel, relations)
|
||||
local result = nil
|
||||
local result_id = nil
|
||||
local rel_id_list = relations:get_relations(rel)
|
||||
|
||||
function set_result(direction, current_rel)
|
||||
if (result ~= nil) and (direction ~= nil) then
|
||||
print('WARNING: relation ' .. rel:id() .. ' is a part of more than one superrelation ' .. result_id .. ' and ' .. current_rel:id())
|
||||
result = nil
|
||||
else
|
||||
result = direction
|
||||
result_id = current_rel:id()
|
||||
end
|
||||
end
|
||||
|
||||
for i, rel_id in ipairs(rel_id_list) do
|
||||
local parent_rel = relations:relation(rel_id)
|
||||
if parent_rel:get_value_by_key('type') == 'route' then
|
||||
local role = parent_rel:get_role(rel)
|
||||
|
||||
if is_direction(role) then
|
||||
set_result(role, parent_rel)
|
||||
else
|
||||
local dir = parent_rel:get_value_by_key('direction')
|
||||
if is_direction(dir) then
|
||||
set_result(dir, parent_rel)
|
||||
end
|
||||
end
|
||||
end
|
||||
-- TODO: support forward/backward
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
function Relations.parse_route_relation(rel, way, relations)
|
||||
local t = rel:get_value_by_key("type")
|
||||
local role = rel:get_role(way)
|
||||
local result = {}
|
||||
|
||||
function add_extra_data(m)
|
||||
local name = rel:get_value_by_key("name")
|
||||
if name then
|
||||
result['route_name'] = name
|
||||
end
|
||||
|
||||
local ref = rel:get_value_by_key("ref")
|
||||
if ref then
|
||||
result['route_ref'] = ref
|
||||
end
|
||||
end
|
||||
|
||||
if t == 'route' then
|
||||
local role_direction = nil
|
||||
local route = rel:get_value_by_key("route")
|
||||
if route == 'road' then
|
||||
-- process the case where the direction is set as a role
|
||||
if is_direction(role) then
|
||||
role_direction = role
|
||||
end
|
||||
end
|
||||
|
||||
local tag_direction = nil
|
||||
local direction = rel:get_value_by_key('direction')
|
||||
if direction then
|
||||
direction = string.lower(direction)
|
||||
if is_direction(direction) then
|
||||
tag_direction = direction
|
||||
end
|
||||
end
|
||||
|
||||
-- determine direction
|
||||
local result_direction = role_direction
|
||||
if result_direction == nil and tag_direction ~= '' then
|
||||
result_direction = tag_direction
|
||||
end
|
||||
|
||||
if role_direction ~= nil and tag_direction ~= nil and role_direction ~= tag_direction then
|
||||
result_direction = nil
|
||||
print('WARNING: conflict direction in role of way ' .. way:id() .. ' and direction tag in relation ' .. rel:id())
|
||||
end
|
||||
|
||||
|
||||
-- process superrelations
|
||||
local super_dir = get_direction_from_superrel(rel, relations)
|
||||
|
||||
-- check if there is a data error
|
||||
if (result_direction ~= nil) and (super_dir ~= nil) and (result_direction ~= super_dir) then
|
||||
print('ERROR: conflicting relation directions found for way ' .. way:id() ..
|
||||
' relation direction is ' .. result_direction .. ' superrelation direction is ' .. super_dir)
|
||||
result_direction = nil
|
||||
elseif result_direction == nil then
|
||||
result_direction = super_dir
|
||||
end
|
||||
|
||||
result['route_direction'] = result_direction
|
||||
|
||||
if role == 'forward' then
|
||||
result['route_forward'] = true
|
||||
elseif role == 'backward' then
|
||||
result['route_forward'] = false
|
||||
else
|
||||
result['route_forward'] = nil
|
||||
end
|
||||
|
||||
add_extra_data(m)
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
function Relations.process_way_refs(way, relations, result)
|
||||
local parsed_rel_list = {}
|
||||
local rel_id_list = relations:get_relations(way)
|
||||
for i, rel_id in ipairs(rel_id_list) do
|
||||
local rel = relations:relation(rel_id)
|
||||
parsed_rel_list[i] = Relations.parse_route_relation(rel, way, relations)
|
||||
end
|
||||
|
||||
-- now process relations data
|
||||
local matched_refs = nil;
|
||||
if result.ref then
|
||||
local match_res = Relations.match_to_ref(parsed_rel_list, result.ref)
|
||||
|
||||
function gen_ref(is_forward)
|
||||
local ref = ''
|
||||
for _, m in pairs(match_res) do
|
||||
if ref ~= '' then
|
||||
ref = ref .. '; '
|
||||
end
|
||||
|
||||
local dir = m.dir.forward
|
||||
if is_forward == false then
|
||||
dir = m.dir.backward
|
||||
end
|
||||
|
||||
if dir then
|
||||
ref = ref .. m.ref .. ' $' .. dir
|
||||
else
|
||||
ref = ref .. m.ref
|
||||
end
|
||||
end
|
||||
|
||||
return ref
|
||||
end
|
||||
|
||||
result.forward_ref = gen_ref(true)
|
||||
result.backward_ref = gen_ref(false)
|
||||
end
|
||||
end
|
||||
|
||||
return Relations
|
10
admiral-router/vehicles/lib/sequence.lua
Normal file
@ -0,0 +1,10 @@
|
||||
-- Sequence of items
|
||||
-- Ordered, but have to loop through items to check for inclusion.
|
||||
-- Currently the same as a table.
|
||||
-- Intended to add a convenience append() function; currently it simply returns the source table.
|
||||
|
||||
function Sequence(source)
|
||||
return source
|
||||
end
|
||||
|
||||
return Sequence
|
23
admiral-router/vehicles/lib/set.lua
Normal file
@ -0,0 +1,23 @@
|
||||
-- Set of items
|
||||
-- Fast check for inclusion, but unordered.
|
||||
--
|
||||
-- Instead of having to do:
|
||||
-- whitelist = { ['apple']=true, ['cherries']=true, ['melons']=true }
|
||||
--
|
||||
-- you can do:
|
||||
-- whitelist = Set { 'apple', 'cherries', 'melons' }
|
||||
--
|
||||
-- and then use it as:
|
||||
-- print( whitelist['cherries'] ) => true
|
||||
|
||||
function Set(source)
|
||||
local set = {}
|
||||
if source then
|
||||
for i,v in ipairs(source) do
|
||||
set[v] = true
|
||||
end
|
||||
end
|
||||
return set
|
||||
end
|
||||
|
||||
return Set
|
131
admiral-router/vehicles/lib/tags.lua
Normal file
@ -0,0 +1,131 @@
|
||||
-- Helpers for searching and parsing tags
|
||||
|
||||
local Tags = {}
|
||||
|
||||
-- return [forward,backward] values for a specific tag.
|
||||
-- e.g. for maxspeed search forward:
|
||||
-- maxspeed:forward
|
||||
-- maxspeed
|
||||
-- and backward:
|
||||
-- maxspeed:backward
|
||||
-- maxspeed
|
||||
|
||||
function Tags.get_forward_backward_by_key(way,data,key)
|
||||
local forward = way:get_value_by_key(key .. ':forward')
|
||||
local backward = way:get_value_by_key(key .. ':backward')
|
||||
|
||||
if not forward or not backward then
|
||||
local common = way:get_value_by_key(key)
|
||||
|
||||
if data.is_forward_oneway then
|
||||
forward = forward or common
|
||||
elseif data.is_reverse_oneway then
|
||||
backward = backward or common
|
||||
else
|
||||
forward = forward or common
|
||||
backward = backward or common
|
||||
end
|
||||
end
|
||||
|
||||
return forward, backward
|
||||
end
|
||||
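-- Illustrative example (two-way road): with maxspeed = "50" and
-- maxspeed:backward = "30", Tags.get_forward_backward_by_key(way, data, 'maxspeed')
-- returns "50", "30" -- the plain key fills in whichever direction has no
-- directional tag.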
|
||||
-- return [forward,backward] values, searching a
|
||||
-- prioritized sequence of tags
|
||||
-- e.g. for the sequence [maxspeed,advisory] search forward:
|
||||
-- maxspeed:forward
|
||||
-- maxspeed
|
||||
-- advisory:forward
|
||||
-- advisory
|
||||
-- and for backward:
|
||||
-- maxspeed:backward
|
||||
-- maxspeed
|
||||
-- advisory:backward
|
||||
-- advisory
|
||||
|
||||
function Tags.get_forward_backward_by_set(way,data,keys)
|
||||
local forward, backward
|
||||
for i,key in ipairs(keys) do
|
||||
if not forward then
|
||||
forward = way:get_value_by_key(key .. ':forward')
|
||||
end
|
||||
if not backward then
|
||||
backward = way:get_value_by_key(key .. ':backward')
|
||||
end
|
||||
if not forward or not backward then
|
||||
local common = way:get_value_by_key(key)
|
||||
forward = forward or common
|
||||
backward = backward or common
|
||||
end
|
||||
if forward and backward then
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
return forward, backward
|
||||
end
|
||||
|
||||
-- look through a sequence of keys combined with a prefix
|
||||
-- e.g. for the sequence [motorcar,motor_vehicle,vehicle] and the prefix 'oneway' search for:
|
||||
-- oneway:motorcar
|
||||
-- oneway:motor_vehicle
|
||||
-- oneway:vehicle
|
||||
|
||||
function Tags.get_value_by_prefixed_sequence(way,seq,prefix)
|
||||
local v
|
||||
for i,key in ipairs(seq) do
|
||||
v = way:get_value_by_key(prefix .. ':' .. key)
|
||||
if v then
|
||||
return v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- look through a sequence of keys combined with a postfix
|
||||
-- e.g. for the sequence [motorcar,motor_vehicle,vehicle] and the postfix 'oneway' search for:
|
||||
-- motorcar:oneway
|
||||
-- motor_vehicle:oneway
|
||||
-- vehicle:oneway
|
||||
|
||||
function Tags.get_value_by_postfixed_sequence(way,seq,postfix)
|
||||
local v
|
||||
for i,key in ipairs(seq) do
|
||||
v = way:get_value_by_key(key .. ':' .. postfix)
|
||||
if v then
|
||||
return v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if key-value pairs are set in a way and return a
|
||||
-- corresponding constant if it is. e.g. for this input:
|
||||
--
|
||||
-- local speeds = {
|
||||
-- highway = {
|
||||
-- residential = 20,
|
||||
-- primary = 40
|
||||
-- },
|
||||
-- amenity = {
|
||||
-- parking = 10
|
||||
-- }
|
||||
-- }
|
||||
--
|
||||
-- we would check whether the following key-value combinations
|
||||
-- are set, and return the corresponding constant:
|
||||
--
|
||||
-- highway = residential => 20
|
||||
-- highway = primary => 40
|
||||
-- amenity = parking => 10
|
||||
|
||||
function Tags.get_constant_by_key_value(way,lookup)
|
||||
for key,set in pairs(lookup) do
|
||||
local way_value = way:get_value_by_key(key)
|
||||
for value,t in pairs(set) do
|
||||
if way_value == value then
|
||||
return key,value,t
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
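-- Continuing the example above (hypothetical way tagged highway=residential):
--   local key, value, speed = Tags.get_constant_by_key_value(way, speeds)
--   -- key == 'highway', value == 'residential', speed == 20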
|
||||
return Tags
|
26
admiral-router/vehicles/lib/traffic_signal.lua
Normal file
@ -0,0 +1,26 @@
|
||||
-- Assigns traffic light value to node as defined by
|
||||
-- include/extractor/traffic_lights.hpp
|
||||
|
||||
local TrafficSignal = {}
|
||||
|
||||
function TrafficSignal.get_value(node)
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if "traffic_signals" == tag then
|
||||
local direction = node:get_value_by_key("traffic_signals:direction")
|
||||
if direction then
|
||||
if "forward" == direction then
|
||||
return traffic_lights.direction_forward
|
||||
end
|
||||
if "backward" == direction then
|
||||
return traffic_lights.direction_reverse
|
||||
end
|
||||
end
|
||||
-- return traffic_lights.direction_all
|
||||
return true
|
||||
end
|
||||
-- return traffic_lights.none
|
||||
return false
|
||||
end
|
||||
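-- Illustrative behaviour: a node tagged highway=traffic_signals with
-- traffic_signals:direction=forward yields traffic_lights.direction_forward;
-- with no direction tag it currently yields plain true (the direction_all
-- constant is left commented out above), and any other node yields false.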
|
||||
return TrafficSignal
|
||||
|
43
admiral-router/vehicles/lib/utils.lua
Normal file
@ -0,0 +1,43 @@
|
||||
-- Profile functions to implement common algorithms of data processing
|
||||
--
|
||||
-- You can run a selection you find useful in your profile,
|
||||
-- or do your own processing if/when required.
|
||||
|
||||
Utils = {}
|
||||
|
||||
-- split string 'a; b; c' into a table with values ['a', 'b', 'c']
|
||||
-- it uses just one separator: ';'
|
||||
function Utils.string_list_tokens(str)
|
||||
local result = {}
|
||||
local idx = 0
|
||||
for s in str.gmatch(str, "([^;]*)") do
|
||||
if s ~= nil and s ~= '' then
|
||||
idx = idx + 1
|
||||
result[idx] = s:gsub("^%s*(.-)%s*$", "%1")
|
||||
end
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
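-- Illustrative example:
--   Utils.string_list_tokens("E61; A1 ;")  --> { "E61", "A1" }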
|
||||
-- same as Utils.string_list_tokens, but with many possible separators:
|
||||
-- ',' | ';' | ' '| '(' | ')'
|
||||
function Utils.tokenize_common(str)
|
||||
local result = {}
|
||||
local idx = 0
|
||||
for s in str.gmatch(str, "%S+") do
|
||||
if s ~= nil and s ~= '' then
|
||||
idx = idx + 1
|
||||
result[idx] = s:gsub("^%s*(.-)%s*$", "%1")
|
||||
end
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
-- returns true if the string contains a number
|
||||
function Utils.is_number(str)
|
||||
return (tonumber(str) ~= nil)
|
||||
end
|
||||
|
||||
return Utils
|
717
admiral-router/vehicles/lib/way_handlers.lua
Normal file
@ -0,0 +1,717 @@
|
||||
-- Profile handlers dealing with various aspects of tag parsing
|
||||
--
|
||||
-- You can run a selection you find useful in your profile,
|
||||
-- or do your own processing if/when required.
|
||||
|
||||
|
||||
local get_turn_lanes = require("lib/guidance").get_turn_lanes
|
||||
local set_classification = require("lib/guidance").set_classification
|
||||
local get_destination = require("lib/destination").get_destination
|
||||
local Tags = require('lib/tags')
|
||||
local Measure = require("lib/measure")
|
||||
|
||||
WayHandlers = {}
|
||||
|
||||
-- check that the way has at least one tag that could imply routability;
|
||||
-- we store the checked tags in data, to avoid fetching again later
|
||||
function WayHandlers.tag_prefetch(profile,way,result,data)
|
||||
for key,v in pairs(profile.prefetch) do
|
||||
data[key] = way:get_value_by_key( key )
|
||||
end
|
||||
|
||||
return next(data) ~= nil
|
||||
end
|
||||
|
||||
-- set default mode
|
||||
function WayHandlers.default_mode(profile,way,result,data)
|
||||
result.forward_mode = profile.default_mode
|
||||
result.backward_mode = profile.default_mode
|
||||
end
|
||||
|
||||
-- handles name, including ref and pronunciation
|
||||
function WayHandlers.names(profile,way,result,data)
|
||||
-- parse the remaining tags
|
||||
local name = way:get_value_by_key("name")
|
||||
local pronunciation = way:get_value_by_key("name:pronunciation")
|
||||
local ref = way:get_value_by_key("ref")
|
||||
local exits = way:get_value_by_key("junction:ref")
|
||||
|
||||
-- Set the name that will be used for instructions
|
||||
if name then
|
||||
result.name = name
|
||||
end
|
||||
|
||||
if ref then
|
||||
result.ref = canonicalizeStringList(ref, ";")
|
||||
end
|
||||
|
||||
if pronunciation then
|
||||
result.pronunciation = pronunciation
|
||||
end
|
||||
|
||||
if exits then
|
||||
result.exits = canonicalizeStringList(exits, ";")
|
||||
end
|
||||
end
|
||||
|
||||
-- junctions
|
||||
function WayHandlers.roundabouts(profile,way,result,data)
|
||||
local junction = way:get_value_by_key("junction");
|
||||
|
||||
if junction == "roundabout" then
|
||||
result.roundabout = true
|
||||
end
|
||||
|
||||
-- See Issue 3361: roundabout-shaped not following roundabout rules.
|
||||
-- This will get us "At Strausberger Platz do Maneuver X" instead of multiple quick turns.
|
||||
-- In a new API version we can think of having a separate type passing it through to the user.
|
||||
if junction == "circular" then
|
||||
result.circular = true
|
||||
end
|
||||
end
|
||||
|
||||
-- determine if this way can be used as a start/end point for routing
|
||||
function WayHandlers.startpoint(profile,way,result,data)
|
||||
-- if profile specifies set of allowed start modes, then check for that
|
||||
-- otherwise require default mode
|
||||
if profile.allowed_start_modes then
|
||||
result.is_startpoint = profile.allowed_start_modes[result.forward_mode] == true or
|
||||
profile.allowed_start_modes[result.backward_mode] == true
|
||||
else
|
||||
result.is_startpoint = result.forward_mode == profile.default_mode or
|
||||
result.backward_mode == profile.default_mode
|
||||
end
|
||||
-- highway=service and access tags check
|
||||
local is_service = data.highway == "service"
|
||||
if is_service then
|
||||
if profile.service_access_tag_blacklist[data.forward_access] then
|
||||
result.is_startpoint = false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- handle turn lanes
|
||||
function WayHandlers.turn_lanes(profile,way,result,data)
|
||||
local forward, backward = get_turn_lanes(way,data)
|
||||
|
||||
if forward then
|
||||
result.turn_lanes_forward = forward
|
||||
end
|
||||
|
||||
if backward then
|
||||
result.turn_lanes_backward = backward
|
||||
end
|
||||
end
|
||||
|
||||
-- set the road classification based on guidance globals configuration
|
||||
function WayHandlers.classification(profile,way,result,data)
|
||||
set_classification(data.highway,result,way)
|
||||
end
|
||||
|
||||
-- handle destination tags
|
||||
function WayHandlers.destinations(profile,way,result,data)
|
||||
if data.is_forward_oneway or data.is_reverse_oneway then
|
||||
local destination = get_destination(way, data.is_forward_oneway)
|
||||
result.destinations = canonicalizeStringList(destination, ",")
|
||||
end
|
||||
end
|
||||
|
||||
-- handling ferries and piers
|
||||
function WayHandlers.ferries(profile,way,result,data)
|
||||
local route = data.route
|
||||
if route then
|
||||
local route_speed = profile.route_speeds[route]
|
||||
if route_speed and route_speed > 0 then
|
||||
local duration = way:get_value_by_key("duration")
|
||||
if duration and durationIsValid(duration) then
|
||||
result.duration = math.max( parseDuration(duration), 1 )
|
||||
end
|
||||
result.forward_mode = mode.ferry
|
||||
result.backward_mode = mode.ferry
|
||||
result.forward_speed = route_speed
|
||||
result.backward_speed = route_speed
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- handling movable bridges
|
||||
function WayHandlers.movables(profile,way,result,data)
|
||||
local bridge = data.bridge
|
||||
if bridge then
|
||||
local bridge_speed = profile.bridge_speeds[bridge]
|
||||
if bridge_speed and bridge_speed > 0 then
|
||||
local capacity_car = way:get_value_by_key("capacity:car")
|
||||
if capacity_car ~= 0 then
|
||||
result.forward_mode = profile.default_mode
|
||||
result.backward_mode = profile.default_mode
|
||||
local duration = way:get_value_by_key("duration")
|
||||
if duration and durationIsValid(duration) then
|
||||
result.duration = math.max( parseDuration(duration), 1 )
|
||||
else
|
||||
result.forward_speed = bridge_speed
|
||||
result.backward_speed = bridge_speed
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- service roads
|
||||
function WayHandlers.service(profile,way,result,data)
|
||||
local service = way:get_value_by_key("service")
|
||||
if service then
|
||||
-- Don't allow access to certain service roads
|
||||
if profile.service_tag_forbidden[service] then
|
||||
result.forward_mode = mode.inaccessible
|
||||
result.backward_mode = mode.inaccessible
|
||||
return false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- all lanes restricted to hov vehicles?
|
||||
function WayHandlers.has_all_designated_hov_lanes(lanes)
|
||||
if not lanes then
|
||||
return false
|
||||
end
|
||||
-- This gmatch call effectively splits the string on | chars.
|
||||
-- we append an extra | to the end so that we can match the final part
|
||||
for lane in (lanes .. '|'):gmatch("([^|]*)|") do
|
||||
if lane and lane ~= "designated" then
|
||||
return false
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
|
||||
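-- Illustrative examples:
--   WayHandlers.has_all_designated_hov_lanes("designated|designated")  --> true
--   WayHandlers.has_all_designated_hov_lanes("designated|no")          --> false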
|
||||
-- handle high occupancy vehicle tags
|
||||
function WayHandlers.hov(profile,way,result,data)
|
||||
-- respect user-preference for HOV
|
||||
if not profile.avoid.hov_lanes then
|
||||
return
|
||||
end
|
||||
|
||||
local hov = way:get_value_by_key("hov")
|
||||
if "designated" == hov then
|
||||
result.forward_restricted = true
|
||||
result.backward_restricted = true
|
||||
end
|
||||
|
||||
data.hov_lanes_forward, data.hov_lanes_backward = Tags.get_forward_backward_by_key(way,data,'hov:lanes')
|
||||
local all_hov_forward = WayHandlers.has_all_designated_hov_lanes(data.hov_lanes_forward)
|
||||
local all_hov_backward = WayHandlers.has_all_designated_hov_lanes(data.hov_lanes_backward)
|
||||
|
||||
-- in this case we will use turn penalties instead of filtering out
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
if (all_hov_forward) then
|
||||
result.forward_restricted = true
|
||||
end
|
||||
if (all_hov_backward) then
|
||||
result.backward_restricted = true
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
-- filter out ways where all lanes are hov only
|
||||
if all_hov_forward then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
if all_hov_backward then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
-- set highway and access classification by user preference
|
||||
function WayHandlers.way_classification_for_turn(profile,way,result,data)
|
||||
local highway = way:get_value_by_key("highway")
|
||||
local access = way:get_value_by_key("access")
|
||||
|
||||
if highway and profile.highway_turn_classification[highway] then
|
||||
assert(profile.highway_turn_classification[highway] < 16, "highway_turn_classification must be smaller than 16")
|
||||
result.highway_turn_classification = profile.highway_turn_classification[highway]
|
||||
end
|
||||
if access and profile.access_turn_classification[access] then
|
||||
assert(profile.access_turn_classification[access] < 16, "access_turn_classification must be smaller than 16")
|
||||
result.access_turn_classification = profile.access_turn_classification[access]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
-- check accessibility by traversing our access tag hierarchy
|
||||
function WayHandlers.access(profile,way,result,data)
|
||||
data.forward_access, data.backward_access =
|
||||
Tags.get_forward_backward_by_set(way,data,profile.access_tags_hierarchy)
|
||||
|
||||
-- only allow a subset of roads to be treated as restricted
|
||||
if profile.restricted_highway_whitelist[data.highway] then
|
||||
if profile.restricted_access_tag_list[data.forward_access] then
|
||||
result.forward_restricted = true
|
||||
end
|
||||
|
||||
if profile.restricted_access_tag_list[data.backward_access] then
|
||||
result.backward_restricted = true
|
||||
end
|
||||
end
|
||||
|
||||
-- blacklist access tags that aren't marked as restricted
|
||||
if profile.access_tag_blacklist[data.forward_access] and not result.forward_restricted then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if profile.access_tag_blacklist[data.backward_access] and not result.backward_restricted then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if result.forward_mode == mode.inaccessible and result.backward_mode == mode.inaccessible then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
-- handle speed (excluding maxspeed)
|
||||
function WayHandlers.speed(profile,way,result,data)
|
||||
if result.forward_speed ~= -1 then
|
||||
return -- abort if already set, eg. by a route
|
||||
end
|
||||
|
||||
local key,value,speed = Tags.get_constant_by_key_value(way,profile.speeds)
|
||||
|
||||
if speed then
|
||||
-- set speed by way type
|
||||
result.forward_speed = speed
|
||||
result.backward_speed = speed
|
||||
else
|
||||
-- Set the avg speed on ways that are marked accessible
|
||||
if profile.access_tag_whitelist[data.forward_access] then
|
||||
result.forward_speed = profile.default_speed
|
||||
elseif data.forward_access and not profile.access_tag_blacklist[data.forward_access] then
|
||||
result.forward_speed = profile.default_speed -- fallback to the avg speed if access tag is not blacklisted
|
||||
elseif not data.forward_access and data.backward_access then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if profile.access_tag_whitelist[data.backward_access] then
|
||||
result.backward_speed = profile.default_speed
|
||||
elseif data.backward_access and not profile.access_tag_blacklist[data.backward_access] then
|
||||
result.backward_speed = profile.default_speed -- fallback to the avg speed if access tag is not blacklisted
|
||||
elseif not data.backward_access and data.forward_access then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
if result.forward_speed == -1 and result.backward_speed == -1 and result.duration <= 0 then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
-- add class information
|
||||
function WayHandlers.classes(profile,way,result,data)
|
||||
if not profile.classes then
|
||||
return
|
||||
end
|
||||
|
||||
local allowed_classes = Set {}
|
||||
for k, v in pairs(profile.classes) do
|
||||
allowed_classes[v] = true
|
||||
end
|
||||
|
||||
local forward_toll, backward_toll = Tags.get_forward_backward_by_key(way, data, "toll")
|
||||
local forward_route, backward_route = Tags.get_forward_backward_by_key(way, data, "route")
|
||||
local tunnel = way:get_value_by_key("tunnel")
|
||||
|
||||
if allowed_classes["tunnel"] and tunnel and tunnel ~= "no" then
|
||||
result.forward_classes["tunnel"] = true
|
||||
result.backward_classes["tunnel"] = true
|
||||
end
|
||||
|
||||
if allowed_classes["toll"] and forward_toll == "yes" then
|
||||
result.forward_classes["toll"] = true
|
||||
end
|
||||
if allowed_classes["toll"] and backward_toll == "yes" then
|
||||
result.backward_classes["toll"] = true
|
||||
end
|
||||
|
||||
if allowed_classes["ferry"] and forward_route == "ferry" then
|
||||
result.forward_classes["ferry"] = true
|
||||
end
|
||||
if allowed_classes["ferry"] and backward_route == "ferry" then
|
||||
result.backward_classes["ferry"] = true
|
||||
end
|
||||
|
||||
if allowed_classes["restricted"] and result.forward_restricted then
|
||||
result.forward_classes["restricted"] = true
|
||||
end
|
||||
if allowed_classes["restricted"] and result.backward_restricted then
|
||||
result.backward_classes["restricted"] = true
|
||||
end
|
||||
|
||||
if allowed_classes["motorway"] and (data.highway == "motorway" or data.highway == "motorway_link") then
|
||||
result.forward_classes["motorway"] = true
|
||||
result.backward_classes["motorway"] = true
|
||||
end
|
||||
end
|
||||
|
||||
-- reduce speed on bad surfaces
|
||||
function WayHandlers.surface(profile,way,result,data)
|
||||
local surface = way:get_value_by_key("surface")
|
||||
local tracktype = way:get_value_by_key("tracktype")
|
||||
local smoothness = way:get_value_by_key("smoothness")
|
||||
|
||||
if surface and profile.surface_speeds[surface] then
|
||||
result.forward_speed = math.min(profile.surface_speeds[surface], result.forward_speed)
|
||||
result.backward_speed = math.min(profile.surface_speeds[surface], result.backward_speed)
|
||||
end
|
||||
if tracktype and profile.tracktype_speeds[tracktype] then
|
||||
result.forward_speed = math.min(profile.tracktype_speeds[tracktype], result.forward_speed)
|
||||
result.backward_speed = math.min(profile.tracktype_speeds[tracktype], result.backward_speed)
|
||||
end
|
||||
if smoothness and profile.smoothness_speeds[smoothness] then
|
||||
result.forward_speed = math.min(profile.smoothness_speeds[smoothness], result.forward_speed)
|
||||
result.backward_speed = math.min(profile.smoothness_speeds[smoothness], result.backward_speed)
|
||||
end
|
||||
end
|
||||
|
||||
-- scale speeds to get better average driving times
|
||||
function WayHandlers.penalties(profile,way,result,data)
|
||||
-- heavily penalize a way tagged with all HOV lanes
|
||||
-- in order to only route over them if there is no other option
|
||||
local service_penalty = 1.0
|
||||
local service = way:get_value_by_key("service")
|
||||
if service and profile.service_penalties[service] then
|
||||
service_penalty = profile.service_penalties[service]
|
||||
end
|
||||
|
||||
local width_penalty = 1.0
|
||||
local width = math.huge
|
||||
local lanes = math.huge
|
||||
local width_string = way:get_value_by_key("width")
|
||||
if width_string and tonumber(width_string:match("%d*")) then
|
||||
width = tonumber(width_string:match("%d*"))
|
||||
end
|
||||
|
||||
local lanes_string = way:get_value_by_key("lanes")
|
||||
if lanes_string and tonumber(lanes_string:match("%d*")) then
|
||||
lanes = tonumber(lanes_string:match("%d*"))
|
||||
end
|
||||
|
||||
local is_bidirectional = result.forward_mode ~= mode.inaccessible and
|
||||
result.backward_mode ~= mode.inaccessible
|
||||
|
||||
if width <= 3 or (lanes <= 1 and is_bidirectional) then
|
||||
width_penalty = 0.5
|
||||
end
|
||||
|
||||
-- Handle high frequency reversible oneways (think traffic signal controlled, changing direction every 15 minutes).
|
||||
-- Scaling speed to take average waiting time into account plus some more for start / stop.
|
||||
local alternating_penalty = 1.0
|
||||
if data.oneway == "alternating" then
|
||||
alternating_penalty = 0.4
|
||||
end
|
||||
|
||||
local sideroad_penalty = 1.0
|
||||
data.sideroad = way:get_value_by_key("side_road")
|
||||
if "yes" == data.sideroad or "rotary" == data.sideroad then
|
||||
sideroad_penalty = profile.side_road_multiplier
|
||||
end
|
||||
|
||||
local forward_penalty = math.min(service_penalty, width_penalty, alternating_penalty, sideroad_penalty)
|
||||
local backward_penalty = math.min(service_penalty, width_penalty, alternating_penalty, sideroad_penalty)
|
||||
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
if result.forward_speed > 0 then
|
||||
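-- rates are expressed in m/s, so the penalized km/h speed is divided by 3.6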
result.forward_rate = (result.forward_speed * forward_penalty) / 3.6
|
||||
end
|
||||
if result.backward_speed > 0 then
|
||||
result.backward_rate = (result.backward_speed * backward_penalty) / 3.6
|
||||
end
|
||||
if result.duration > 0 then
|
||||
result.weight = result.duration / forward_penalty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- maxspeed and advisory maxspeed
|
||||
function WayHandlers.maxspeed(profile,way,result,data)
|
||||
local keys = Sequence { 'maxspeed:advisory', 'maxspeed', 'source:maxspeed', 'maxspeed:type' }
|
||||
local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
|
||||
forward = WayHandlers.parse_maxspeed(forward,profile)
|
||||
backward = WayHandlers.parse_maxspeed(backward,profile)
|
||||
|
||||
if forward and forward > 0 then
|
||||
result.forward_speed = forward * profile.speed_reduction
|
||||
end
|
||||
|
||||
if backward and backward > 0 then
|
||||
result.backward_speed = backward * profile.speed_reduction
|
||||
end
|
||||
end
|
||||
|
||||
function WayHandlers.parse_maxspeed(source,profile)
|
||||
if not source then
|
||||
return 0
|
||||
end
|
||||
|
||||
local n = Measure.get_max_speed(source)
|
||||
if not n then
|
||||
-- parse maxspeed like FR:urban
|
||||
source = string.lower(source)
|
||||
n = profile.maxspeed_table[source]
|
||||
if not n then
|
||||
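-- e.g. "fr:urban" falls back to the generic "urban" default when the exact key has no table entry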
local highway_type = string.match(source, "%a%a:(%a+)")
|
||||
n = profile.maxspeed_table_default[highway_type]
|
||||
if not n then
|
||||
n = 0
|
||||
end
|
||||
end
|
||||
end
|
||||
return n
|
||||
end
|
||||
|
||||
-- handle maxheight tags
|
||||
function WayHandlers.handle_height(profile,way,result,data)
|
||||
local keys = Sequence { 'maxheight:physical', 'maxheight' }
|
||||
local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
|
||||
forward = Measure.get_max_height(forward,way)
|
||||
backward = Measure.get_max_height(backward,way)
|
||||
|
||||
if forward and forward < profile.vehicle_height then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if backward and backward < profile.vehicle_height then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
-- handle maxwidth tags
|
||||
function WayHandlers.handle_width(profile,way,result,data)
|
||||
local keys = Sequence { 'maxwidth:physical', 'maxwidth', 'width', 'est_width' }
|
||||
local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
|
||||
local narrow = way:get_value_by_key('narrow')
|
||||
|
||||
if ((forward and forward == 'narrow') or (narrow and narrow == 'yes')) and profile.vehicle_width > 2.2 then
|
||||
result.forward_mode = mode.inaccessible
|
||||
elseif forward then
|
||||
forward = Measure.get_max_width(forward)
|
||||
if forward and forward <= profile.vehicle_width then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
if ((backward and backward == 'narrow') or (narrow and narrow == 'yes')) and profile.vehicle_width > 2.2 then
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif backward then
|
||||
backward = Measure.get_max_width(backward)
|
||||
if backward and backward <= profile.vehicle_width then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- handle maxweight tags
|
||||
function WayHandlers.handle_weight(profile,way,result,data)
|
||||
local keys = Sequence { 'maxweight' }
|
||||
local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
|
||||
forward = Measure.get_max_weight(forward)
|
||||
backward = Measure.get_max_weight(backward)
|
||||
|
||||
if forward and forward < profile.vehicle_weight then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if backward and backward < profile.vehicle_weight then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
-- handle maxlength tags
|
||||
function WayHandlers.handle_length(profile,way,result,data)
|
||||
local keys = Sequence { 'maxlength' }
|
||||
local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
|
||||
forward = Measure.get_max_length(forward)
|
||||
backward = Measure.get_max_length(backward)
|
||||
|
||||
if forward and forward < profile.vehicle_length then
|
||||
result.forward_mode = mode.inaccessible
|
||||
end
|
||||
|
||||
if backward and backward < profile.vehicle_length then
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
|
||||
-- handle oneways tags
|
||||
function WayHandlers.oneway(profile,way,result,data)
|
||||
if not profile.oneway_handling then
|
||||
return
|
||||
end
|
||||
|
||||
local oneway
|
||||
if profile.oneway_handling == true then
|
||||
oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway') or way:get_value_by_key("oneway")
|
||||
elseif profile.oneway_handling == 'specific' then
|
||||
oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway')
|
||||
elseif profile.oneway_handling == 'conditional' then
|
||||
-- The following code assumes that the `oneway` and `oneway:conditional` tags have opposite values and takes the weaker one (always `no`).
|
||||
-- So if we will have:
|
||||
-- oneway=yes, oneway:conditional=no @ (condition1)
|
||||
-- oneway=no, oneway:conditional=yes @ (condition2)
|
||||
-- condition1 will be always true and condition2 will be always false.
|
||||
if way:get_value_by_key("oneway:conditional") then
|
||||
oneway = "no"
|
||||
else
|
||||
oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway') or way:get_value_by_key("oneway")
|
||||
end
|
||||
end
|
||||
|
||||
data.oneway = oneway
|
||||
|
||||
if oneway == "-1" then
|
||||
data.is_reverse_oneway = true
|
||||
result.forward_mode = mode.inaccessible
|
||||
elseif oneway == "yes" or
|
||||
oneway == "1" or
|
||||
oneway == "true" then
|
||||
data.is_forward_oneway = true
|
||||
result.backward_mode = mode.inaccessible
|
||||
elseif profile.oneway_handling == true then
|
||||
local junction = way:get_value_by_key("junction")
|
||||
if data.highway == "motorway" or
|
||||
junction == "roundabout" or
|
||||
junction == "circular" then
|
||||
if oneway ~= "no" then
|
||||
-- implied oneway
|
||||
data.is_forward_oneway = true
|
||||
result.backward_mode = mode.inaccessible
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function WayHandlers.weights(profile,way,result,data)
|
||||
if profile.properties.weight_name == 'distance' then
|
||||
result.weight = -1
|
||||
-- set weight rates to 1 for the distance weight, edge weights are distance / rate
|
||||
if (result.forward_mode ~= mode.inaccessible and result.forward_speed > 0) then
|
||||
result.forward_rate = 1
|
||||
end
|
||||
if (result.backward_mode ~= mode.inaccessible and result.backward_speed > 0) then
|
||||
result.backward_rate = 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
-- handle general avoid rules
|
||||
|
||||
function WayHandlers.avoid_ways(profile,way,result,data)
|
||||
if profile.avoid[data.highway] then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
-- handle various tags that can block access
|
||||
function WayHandlers.blocked_ways(profile,way,result,data)
|
||||
|
||||
-- areas
|
||||
if profile.avoid.area and way:get_value_by_key("area") == "yes" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- toll roads
|
||||
if profile.avoid.toll and way:get_value_by_key("toll") == "yes" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- don't route over steps
|
||||
if profile.avoid.steps and data.highway == "steps" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- construction
|
||||
-- TODO if highway is valid then we shouldn't check railway, and vice versa
|
||||
if profile.avoid.construction and (data.highway == 'construction' or way:get_value_by_key('railway') == 'construction') then
|
||||
return false
|
||||
end
|
||||
|
||||
-- In addition to the highway=construction tag above handle the construction=* tag
|
||||
-- http://wiki.openstreetmap.org/wiki/Key:construction
|
||||
-- https://taginfo.openstreetmap.org/keys/construction#values
|
||||
if profile.avoid.construction then
|
||||
local construction = way:get_value_by_key('construction')
|
||||
|
||||
-- Of course there are negative tags to handle, too
|
||||
if construction and not profile.construction_whitelist[construction] then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
-- Not only are there multiple construction tags there is also a proposed=* tag.
|
||||
-- http://wiki.openstreetmap.org/wiki/Key:proposed
|
||||
-- https://taginfo.openstreetmap.org/keys/proposed#values
|
||||
if profile.avoid.proposed and way:get_value_by_key('proposed') then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Reversible oneways change direction with low frequency (think twice a day):
|
||||
-- do not route over these at all at the moment because of time dependence.
|
||||
-- Note: alternating (high frequency) oneways are handled below with penalty.
|
||||
if profile.avoid.reversible and way:get_value_by_key("oneway") == "reversible" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- impassables
|
||||
if profile.avoid.impassable then
|
||||
if way:get_value_by_key("impassable") == "yes" then
|
||||
return false
|
||||
end
|
||||
|
||||
if way:get_value_by_key("status") == "impassable" then
|
||||
return false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function WayHandlers.driving_side(profile, way, result, data)
|
||||
local driving_side = way:get_value_by_key('driving_side')
|
||||
if driving_side == nil then
|
||||
driving_side = way:get_location_tag('driving_side')
|
||||
end
|
||||
|
||||
if driving_side == 'left' then
|
||||
result.is_left_hand_driving = true
|
||||
elseif driving_side == 'right' then
|
||||
result.is_left_hand_driving = false
|
||||
else
|
||||
result.is_left_hand_driving = profile.properties.left_hand_driving
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
-- Call a sequence of handlers, aborting in case a handler returns false. Example:
|
||||
--
|
||||
-- handlers = Sequence {
|
||||
-- WayHandlers.tag_prefetch,
|
||||
-- WayHandlers.default_mode,
|
||||
-- WayHandlers.blocked_ways,
|
||||
-- WayHandlers.access,
|
||||
-- WayHandlers.speed,
|
||||
-- WayHandlers.names
|
||||
-- }
|
||||
--
|
||||
-- WayHandlers.run(handlers,way,result,data,profile)
|
||||
--
|
||||
-- Each method in the list will be called on the WayHandlers object.
|
||||
-- All handlers must accept the parameters (profile, way, result, data, relations) and return false
|
||||
-- if the handler chain should be aborted.
|
||||
-- To ensure the correct order of method calls, use a Sequence of handler names.
|
||||
|
||||
function WayHandlers.run(profile, way, result, data, handlers, relations)
|
||||
for i,handler in ipairs(handlers) do
|
||||
if handler(profile, way, result, data, relations) == false then
|
||||
return false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return WayHandlers
|
504
admiral-router/vehicles/mk.lua
Normal file
504
admiral-router/vehicles/mk.lua
Normal file
@ -0,0 +1,504 @@
|
||||
-- mk profile (adapted from the OSRM car profile)
|
||||
|
||||
api_version = 4
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
Relations = require("lib/relations")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
limit = require("lib/maxspeed").limit
|
||||
Utils = require("lib/utils")
|
||||
Measure = require("lib/measure")
|
||||
|
||||
function setup()
|
||||
return {
|
||||
properties = {
|
||||
max_speed_for_map_matching = 100/3.6, -- 100km/h -> m/s
|
||||
-- For routing based on duration, but weighted for preferring certain roads
|
||||
-- weight_name = 'routability',
|
||||
-- For shortest duration without penalties for accessibility
|
||||
weight_name = 'duration',
|
||||
-- For shortest distance without penalties for accessibility
|
||||
-- weight_name = 'distance',
|
||||
process_call_tagless_node = false,
|
||||
u_turn_penalty = 20,
|
||||
continue_straight_at_waypoint = true,
|
||||
use_turn_restrictions = true,
|
||||
left_hand_driving = false,
|
||||
traffic_light_penalty = 2,
|
||||
},
|
||||
|
||||
default_mode = mode.driving,
|
||||
default_speed = 23,
|
||||
oneway_handling = true,
|
||||
side_road_multiplier = 0.9,
|
||||
turn_penalty = 4,
|
||||
speed_reduction = 0.9,
|
||||
turn_bias = 1.05,
|
||||
cardinal_directions = false,
|
||||
|
||||
-- Size of the vehicle, to be limited by physical restriction of the way
|
||||
vehicle_height = 1.5, -- in meters, 2.0m is the height slightly above biggest SUVs
|
||||
vehicle_width = 1.0, -- in meters, ways with narrow tag are considered narrower than 2.2m
|
||||
|
||||
-- Size of the vehicle, to be limited mostly by legal restriction of the way
|
||||
vehicle_length = 2, -- in meters, 4.8m is the length of large or family car
|
||||
vehicle_weight = 200, -- in kilograms
|
||||
|
||||
-- a list of suffixes to suppress in name change instructions. The suffixes also include common substrings of each other
|
||||
suffix_list = {
|
||||
'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea'
|
||||
},
|
||||
|
||||
barrier_whitelist = Set {
|
||||
'cattle_grid',
|
||||
'border_control',
|
||||
'toll_booth',
|
||||
'sally_port',
|
||||
'gate',
|
||||
'lift_gate',
|
||||
'no',
|
||||
'entrance',
|
||||
'height_restrictor',
|
||||
'arch'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'motorcar',
|
||||
"motorcycle",
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'permissive',
|
||||
'designated',
|
||||
'hov'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
'destination'
|
||||
},
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set {
|
||||
'private'
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set {
|
||||
'destination',
|
||||
},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle',
|
||||
'access'
|
||||
},
|
||||
|
||||
service_tag_forbidden = Set {
|
||||
},
|
||||
|
||||
restrictions = Sequence {
|
||||
'motorcar',
|
||||
'motor_vehicle',
|
||||
'vehicle'
|
||||
},
|
||||
|
||||
classes = Sequence {
|
||||
'toll', 'motorway', 'ferry', 'restricted', 'tunnel'
|
||||
},
|
||||
|
||||
-- classes to support for exclude flags
|
||||
excludable = Sequence {
|
||||
Set {'toll'},
|
||||
Set {'motorway'},
|
||||
Set {'ferry'}
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'area',
|
||||
-- 'toll', -- uncomment this to avoid tolls
|
||||
'reversible',
|
||||
'impassable',
|
||||
'hov_lanes',
|
||||
'steps',
|
||||
'construction',
|
||||
'proposed'
|
||||
},
|
||||
|
||||
speeds = Sequence {
|
||||
highway = {
|
||||
motorway = 100,
|
||||
motorway_link = 50,
|
||||
trunk = 90,
|
||||
trunk_link = 40,
|
||||
primary = 50,
|
||||
primary_link = 30,
|
||||
secondary = 50,
|
||||
secondary_link = 30,
|
||||
tertiary = 25,
|
||||
tertiary_link = 25,
|
||||
unclassified = 25,
|
||||
track = 20,
|
||||
residential = 14,
|
||||
living_street = 10,
|
||||
service = 10
|
||||
}
|
||||
},
|
||||
|
||||
service_penalties = {
|
||||
alley = 0.5,
|
||||
parking = 0.5,
|
||||
parking_aisle = 0.5,
|
||||
driveway = 0.5,
|
||||
["drive-through"] = 0.5,
|
||||
["drive-thru"] = 0.5
|
||||
},
|
||||
|
||||
restricted_highway_whitelist = Set {
|
||||
'motorway',
|
||||
'motorway_link',
|
||||
'trunk',
|
||||
'trunk_link',
|
||||
'primary',
|
||||
'primary_link',
|
||||
'secondary',
|
||||
'secondary_link',
|
||||
'tertiary',
|
||||
'tertiary_link',
|
||||
'residential',
|
||||
'living_street',
|
||||
'unclassified',
|
||||
'service',
|
||||
'track'
|
||||
},
|
||||
|
||||
construction_whitelist = Set {
|
||||
'no',
|
||||
'widening',
|
||||
'minor',
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5,
|
||||
shuttle_train = 10
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
movable = 5
|
||||
},
|
||||
|
||||
-- surface/tracktype/smoothness
|
||||
-- values were estimated from looking at the photos at the relevant wiki pages
|
||||
|
||||
-- max speed for surfaces
|
||||
surface_speeds = {
|
||||
asphalt = nil, -- nil means no limit; removing the line has the same effect
|
||||
concrete = nil,
|
||||
["concrete:plates"] = nil,
|
||||
["concrete:lanes"] = nil,
|
||||
paved = nil,
|
||||
|
||||
cement = 80,
|
||||
compacted = 80,
|
||||
fine_gravel = 80,
|
||||
|
||||
paving_stones = 60,
|
||||
metal = 60,
|
||||
bricks = 60,
|
||||
|
||||
grass = 40,
|
||||
wood = 40,
|
||||
sett = 40,
|
||||
grass_paver = 40,
|
||||
gravel = 40,
|
||||
unpaved = 40,
|
||||
ground = 40,
|
||||
dirt = 40,
|
||||
pebblestone = 40,
|
||||
tartan = 40,
|
||||
|
||||
cobblestone = 30,
|
||||
clay = 30,
|
||||
|
||||
earth = 20,
|
||||
stone = 20,
|
||||
rocky = 20,
|
||||
sand = 20,
|
||||
|
||||
mud = 10
|
||||
},
|
||||
|
||||
-- max speed for tracktypes
|
||||
tracktype_speeds = {
|
||||
grade1 = 60,
|
||||
grade2 = 40,
|
||||
grade3 = 30,
|
||||
grade4 = 25,
|
||||
grade5 = 20
|
||||
},
|
||||
|
||||
-- max speed for smoothnesses
|
||||
smoothness_speeds = {
|
||||
intermediate = 80,
|
||||
bad = 40,
|
||||
very_bad = 20,
|
||||
horrible = 10,
|
||||
very_horrible = 5,
|
||||
impassable = 0
|
||||
},
|
||||
|
||||
-- http://wiki.openstreetmap.org/wiki/Speed_limits
|
||||
maxspeed_table_default = {
|
||||
urban = 50,
|
||||
rural = 90,
|
||||
trunk = 100,
|
||||
motorway = 100
|
||||
},
|
||||
|
||||
-- List only exceptions
|
||||
maxspeed_table = {
|
||||
["at:rural"] = 100,
|
||||
["at:trunk"] = 100,
|
||||
["be:motorway"] = 120,
|
||||
["be-bru:rural"] = 70,
|
||||
["be-bru:urban"] = 30,
|
||||
["be-vlg:rural"] = 70,
|
||||
["by:urban"] = 60,
|
||||
["by:motorway"] = 100,
|
||||
["ch:rural"] = 80,
|
||||
["ch:trunk"] = 100,
|
||||
["ch:motorway"] = 100,
|
||||
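-- a value of 0 means no maxspeed override is applied (WayHandlers.maxspeed ignores non-positive values)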
["cz:trunk"] = 0,
|
||||
["cz:motorway"] = 0,
|
||||
["de:living_street"] = 7,
|
||||
["de:rural"] = 100,
|
||||
["de:motorway"] = 0,
|
||||
["dk:rural"] = 80,
|
||||
["fr:rural"] = 80,
|
||||
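-- GB/UK limits are tagged in mph; (60*1609)/1000 converts 60 mph to km/h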
["gb:nsl_single"] = (60*1609)/1000,
|
||||
["gb:nsl_dual"] = (70*1609)/1000,
|
||||
["gb:motorway"] = (70*1609)/1000,
|
||||
["nl:rural"] = 80,
|
||||
["nl:trunk"] = 100,
|
||||
['no:rural'] = 80,
|
||||
['no:motorway'] = 100,
|
||||
['pl:rural'] = 100,
|
||||
['pl:trunk'] = 100,
|
||||
['pl:motorway'] = 100,
|
||||
["ro:trunk"] = 100,
|
||||
["ru:living_street"] = 20,
|
||||
["ru:urban"] = 60,
|
||||
["ru:motorway"] = 100,
|
||||
["uk:nsl_single"] = (60*1609)/1000,
|
||||
["uk:nsl_dual"] = (70*1609)/1000,
|
||||
["uk:motorway"] = (70*1609)/1000,
|
||||
['za:urban'] = 60,
|
||||
['za:rural'] = 100,
|
||||
["none"] = 100
|
||||
},
|
||||
|
||||
relation_types = Sequence {
|
||||
"route"
|
||||
},
|
||||
|
||||
-- classify highway tags when necessary for turn weights
|
||||
highway_turn_classification = {
|
||||
},
|
||||
|
||||
-- classify access tags when necessary for turn weights
|
||||
access_turn_classification = {
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result, relations)
|
||||
-- parse access and barrier tags
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access then
|
||||
if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier then
|
||||
-- check height restriction barriers
|
||||
local restricted_by_height = false
|
||||
if barrier == 'height_restrictor' then
|
||||
local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node)
|
||||
restricted_by_height = maxheight and maxheight < profile.vehicle_height
|
||||
end
|
||||
|
||||
-- make an exception for rising bollard barriers
|
||||
local bollard = node:get_value_by_key("bollard")
|
||||
local rising_bollard = bollard and "rising" == bollard
|
||||
|
||||
-- make an exception for lowered/flat barrier=kerb
|
||||
-- and incorrect tagging of highway crossing kerb as highway barrier
|
||||
local kerb = node:get_value_by_key("kerb")
|
||||
local highway = node:get_value_by_key("highway")
|
||||
local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb)
|
||||
local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing"
|
||||
|
||||
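-- treat the node as a barrier if it is not whitelisted and none of the exceptions above apply, or if a height restrictor is too low for the vehicle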
if not profile.barrier_whitelist[barrier]
|
||||
and not rising_bollard
|
||||
and not flat_kerb
|
||||
and not highway_crossing_kerb
|
||||
or restricted_by_height then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
function process_way(profile, way, result, relations)
|
||||
-- the initial filtering of ways based on presence of tags
|
||||
-- affects processing times significantly, because all ways
|
||||
-- have to be checked.
|
||||
-- to increase performance, prefetching and initial tag check
|
||||
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbid access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
bridge = way:get_value_by_key('bridge'),
|
||||
route = way:get_value_by_key('route')
|
||||
}
|
||||
|
||||
-- perform a quick initial check and abort if the way is
|
||||
-- obviously not routable.
|
||||
-- highway or route tags must be in data table, bridge is optional
|
||||
if (not data.highway or data.highway == '') and
|
||||
(not data.route or data.route == '')
|
||||
then
|
||||
return
|
||||
end
|
||||
|
||||
handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
|
||||
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
WayHandlers.avoid_ways,
|
||||
WayHandlers.handle_height,
|
||||
WayHandlers.handle_width,
|
||||
WayHandlers.handle_length,
|
||||
WayHandlers.handle_weight,
|
||||
|
||||
-- determine access status by checking our hierarchy of
|
||||
-- access tags, e.g: motorcar, motor_vehicle, vehicle
|
||||
WayHandlers.access,
|
||||
|
||||
-- check whether forward/backward directions are routable
|
||||
WayHandlers.oneway,
|
||||
|
||||
-- check a road's destination
|
||||
WayHandlers.destinations,
|
||||
|
||||
-- check whether we're using a special transport mode
|
||||
WayHandlers.ferries,
|
||||
WayHandlers.movables,
|
||||
|
||||
-- handle service road restrictions
|
||||
WayHandlers.service,
|
||||
|
||||
-- handle hov
|
||||
WayHandlers.hov,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.speed,
|
||||
WayHandlers.maxspeed,
|
||||
WayHandlers.surface,
|
||||
WayHandlers.penalties,
|
||||
|
||||
-- compute class labels
|
||||
WayHandlers.classes,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.turn_lanes,
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle various other flags
|
||||
WayHandlers.roundabouts,
|
||||
WayHandlers.startpoint,
|
||||
WayHandlers.driving_side,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights,
|
||||
|
||||
-- set classification of ways relevant for turns
|
||||
WayHandlers.way_classification_for_turn
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers, relations)
|
||||
|
||||
if profile.cardinal_directions then
|
||||
Relations.process_way_refs(way, relations, result)
|
||||
end
|
||||
end
|
||||
|
||||
function process_turn(profile, turn)
|
||||
-- Use a sigmoid function to return a penalty that maxes out at turn_penalty
|
||||
-- over the space of 0-180 degrees. Values here were chosen by fitting
|
||||
-- the function to some turn penalty samples from real driving.
|
||||
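-- e.g. with turn_penalty = 4 and turn_bias = 1.05, a near-straight turn adds almost nothing while a sharp 180-degree turn approaches the full 4 s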
local turn_penalty = profile.turn_penalty
|
||||
local turn_bias = turn.is_left_hand_driving and 1. / profile.turn_bias or profile.turn_bias
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = profile.properties.traffic_light_penalty
|
||||
end
|
||||
|
||||
if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then
|
||||
if turn.angle >= 0 then
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias)))
|
||||
else
|
||||
turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias)))
|
||||
end
|
||||
|
||||
if turn.is_u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
end
|
||||
|
||||
-- for distance based routing we don't want to have penalties based on turn angle
|
||||
if profile.properties.weight_name == 'distance' then
|
||||
turn.weight = 0
|
||||
else
|
||||
turn.weight = turn.duration
|
||||
end
|
||||
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
-- penalize turns from non-local access only segments onto local access only tags
|
||||
if not turn.source_restricted and turn.target_restricted then
|
||||
turn.weight = constants.max_turn_weight
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
264
admiral-router/vehicles/walk.lua
Normal file
264
admiral-router/vehicles/walk.lua
Normal file
@ -0,0 +1,264 @@
|
||||
-- Foot profile
|
||||
|
||||
api_version = 2
|
||||
|
||||
Set = require('lib/set')
|
||||
Sequence = require('lib/sequence')
|
||||
Handlers = require("lib/way_handlers")
|
||||
find_access_tag = require("lib/access").find_access_tag
|
||||
|
||||
function setup()
|
||||
local max_speed = 5
|
||||
local walking_speed = 5
|
||||
return {
|
||||
properties = {
|
||||
weight_name = 'duration',
|
||||
max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s
|
||||
call_tagless_node_function = false,
|
||||
traffic_light_penalty = 2,
|
||||
u_turn_penalty = 2,
|
||||
continue_straight_at_waypoint = false,
|
||||
use_turn_restrictions = false,
|
||||
},
|
||||
|
||||
default_mode = mode.walking,
|
||||
default_speed = walking_speed,
|
||||
oneway_handling = 'specific', -- respect 'oneway:foot' but not 'oneway'
|
||||
|
||||
barrier_blacklist = Set {
|
||||
'yes',
|
||||
'wall',
|
||||
'fence'
|
||||
},
|
||||
|
||||
access_tag_whitelist = Set {
|
||||
'yes',
|
||||
'foot',
|
||||
'permissive',
|
||||
'designated'
|
||||
},
|
||||
|
||||
access_tag_blacklist = Set {
|
||||
'no',
|
||||
},
|
||||
|
||||
restricted_access_tag_list = Set { },
|
||||
|
||||
restricted_highway_whitelist = Set { },
|
||||
|
||||
construction_whitelist = Set {},
|
||||
|
||||
access_tags_hierarchy = Sequence {
|
||||
'foot',
|
||||
'access'
|
||||
},
|
||||
|
||||
-- tags that disallow access in combination with highway=service
|
||||
service_access_tag_blacklist = Set { },
|
||||
|
||||
restrictions = Sequence {
|
||||
'foot'
|
||||
},
|
||||
|
||||
-- list of suffixes to suppress in name change instructions
|
||||
suffix_list = Set {
|
||||
'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East'
|
||||
},
|
||||
|
||||
avoid = Set {
|
||||
'impassable'
|
||||
},
|
||||
|
||||
speeds = Sequence {
|
||||
highway = {
|
||||
primary = walking_speed,
|
||||
primary_link = walking_speed,
|
||||
secondary = walking_speed,
|
||||
secondary_link = walking_speed,
|
||||
tertiary = walking_speed,
|
||||
tertiary_link = walking_speed,
|
||||
unclassified = walking_speed,
|
||||
residential = walking_speed,
|
||||
road = walking_speed,
|
||||
living_street = walking_speed,
|
||||
service = walking_speed,
|
||||
track = walking_speed,
|
||||
path = walking_speed,
|
||||
steps = walking_speed,
|
||||
pedestrian = walking_speed,
|
||||
footway = walking_speed,
|
||||
pier = walking_speed,
|
||||
},
|
||||
|
||||
railway = {
|
||||
platform = walking_speed
|
||||
},
|
||||
|
||||
amenity = {
|
||||
parking = walking_speed,
|
||||
parking_entrance= walking_speed
|
||||
},
|
||||
|
||||
man_made = {
|
||||
pier = walking_speed
|
||||
},
|
||||
|
||||
leisure = {
|
||||
track = walking_speed
|
||||
}
|
||||
},
|
||||
|
||||
route_speeds = {
|
||||
ferry = 5
|
||||
},
|
||||
|
||||
bridge_speeds = {
|
||||
},
|
||||
|
||||
surface_speeds = {
|
||||
fine_gravel = walking_speed*0.75,
|
||||
gravel = walking_speed*0.75,
|
||||
pebblestone = walking_speed*0.75,
|
||||
mud = walking_speed*0.5,
|
||||
sand = walking_speed*0.5
|
||||
},
|
||||
|
||||
tracktype_speeds = {
|
||||
},
|
||||
|
||||
smoothness_speeds = {
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
function process_node(profile, node, result)
|
||||
-- parse access and barrier tags
|
||||
local access = find_access_tag(node, profile.access_tags_hierarchy)
|
||||
if access then
|
||||
if profile.access_tag_blacklist[access] then
|
||||
result.barrier = true
|
||||
end
|
||||
else
|
||||
local barrier = node:get_value_by_key("barrier")
|
||||
if barrier then
|
||||
-- make an exception for rising bollard barriers
|
||||
local bollard = node:get_value_by_key("bollard")
|
||||
local rising_bollard = bollard and "rising" == bollard
|
||||
|
||||
if profile.barrier_blacklist[barrier] and not rising_bollard then
|
||||
result.barrier = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- check if node is a traffic light
|
||||
local tag = node:get_value_by_key("highway")
|
||||
if "traffic_signals" == tag then
|
||||
result.traffic_lights = true
|
||||
end
|
||||
end
|
||||
|
||||
-- main entry point for processing a way
|
||||
function process_way(profile, way, result)
|
||||
-- the initial filtering of ways based on presence of tags
|
||||
-- affects processing times significantly, because all ways
|
||||
-- have to be checked.
|
||||
-- to increase performance, prefetching and initial tag check
|
||||
-- is done directly instead of via a handler.
|
||||
|
||||
-- in general we should try to abort as soon as
|
||||
-- possible if the way is not routable, to avoid doing
|
||||
-- unnecessary work. this implies we should check things that
|
||||
-- commonly forbid access early, and handle edge cases later.
|
||||
|
||||
-- data table for storing intermediate values during processing
|
||||
local data = {
|
||||
-- prefetch tags
|
||||
highway = way:get_value_by_key('highway'),
|
||||
bridge = way:get_value_by_key('bridge'),
|
||||
route = way:get_value_by_key('route'),
|
||||
leisure = way:get_value_by_key('leisure'),
|
||||
man_made = way:get_value_by_key('man_made'),
|
||||
railway = way:get_value_by_key('railway'),
|
||||
platform = way:get_value_by_key('platform'),
|
||||
amenity = way:get_value_by_key('amenity'),
|
||||
public_transport = way:get_value_by_key('public_transport')
|
||||
}
|
||||
|
||||
-- perform a quick initial check and abort if the way is
|
||||
-- obviously not routable. here we require at least one
|
||||
-- of the prefetched tags to be present, i.e. the data table
|
||||
-- cannot be empty
|
||||
if next(data) == nil then -- is the data table empty?
|
||||
return
|
||||
end
|
||||
|
||||
local handlers = Sequence {
|
||||
-- set the default mode for this profile. it can be changed later
|
||||
-- in case it turns out we're e.g. on a ferry
|
||||
WayHandlers.default_mode,
|
||||
|
||||
-- check various tags that could indicate that the way is not
|
||||
-- routable. this includes things like status=impassable,
|
||||
-- toll=yes and oneway=reversible
|
||||
WayHandlers.blocked_ways,
|
||||
|
||||
-- determine access status by checking our hierarchy of
|
||||
-- access tags, e.g: motorcar, motor_vehicle, vehicle
|
||||
WayHandlers.access,
|
||||
|
||||
-- check whether forward/backward directions are routable
|
||||
WayHandlers.oneway,
|
||||
|
||||
-- check a road's destination
|
||||
WayHandlers.destinations,
|
||||
|
||||
-- check whether we're using a special transport mode
|
||||
WayHandlers.ferries,
|
||||
WayHandlers.movables,
|
||||
|
||||
-- compute speed taking into account way type, maxspeed tags, etc.
|
||||
WayHandlers.speed,
|
||||
WayHandlers.surface,
|
||||
|
||||
-- handle turn lanes and road classification, used for guidance
|
||||
WayHandlers.classification,
|
||||
|
||||
-- handle various other flags
|
||||
WayHandlers.roundabouts,
|
||||
WayHandlers.startpoint,
|
||||
|
||||
-- set name, ref and pronunciation
|
||||
WayHandlers.names,
|
||||
|
||||
-- set weight properties of the way
|
||||
WayHandlers.weights
|
||||
}
|
||||
|
||||
WayHandlers.run(profile, way, result, data, handlers)
|
||||
end
|
||||
|
||||
function process_turn (profile, turn)
|
||||
turn.duration = 0.
|
||||
|
||||
if turn.direction_modifier == direction_modifier.u_turn then
|
||||
turn.duration = turn.duration + profile.properties.u_turn_penalty
|
||||
end
|
||||
|
||||
if turn.has_traffic_light then
|
||||
turn.duration = profile.properties.traffic_light_penalty
|
||||
end
|
||||
if profile.properties.weight_name == 'routability' then
|
||||
-- penalize turns from non-local access only segments onto local access only tags
|
||||
if not turn.source_restricted and turn.target_restricted then
|
||||
turn.weight = turn.weight + 3000
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
setup = setup,
|
||||
process_way = process_way,
|
||||
process_node = process_node,
|
||||
process_turn = process_turn
|
||||
}
|
8
admiral-worker/.editorconfig
Normal file
8
admiral-worker/.editorconfig
Normal file
@ -0,0 +1,8 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = tab
|
||||
insert_final_newline = true
|
||||
max_line_length = 150
|
||||
tab_width = 4
|
||||
trim_trailing_whitespace = true
|
25
admiral-worker/.gitignore
vendored
Normal file
25
admiral-worker/.gitignore
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
.idea
|
||||
.vscode
|
||||
|
||||
# Secrets
|
||||
.env*
|
||||
|
||||
# Auto generated elements
|
||||
/logs
|
||||
venv
|
||||
.coverage
|
||||
htmlcov
|
||||
|
||||
# Big files
|
||||
!.gitkeep
|
||||
data/graph-hopper/**
|
||||
data/updater/**
|
||||
data/ftp/**
|
||||
data/worker/**
|
||||
data/snapshot/**
|
||||
data/mock/**
|
||||
app/notebooks/inputs/**
|
||||
app/notebooks/outputs/**
|
||||
app/notebooks/calculated_stop_times/**
|
||||
# Custom files for valhalla
|
||||
custom_files
|
5
admiral-worker/Dockerfile
Normal file
5
admiral-worker/Dockerfile
Normal file
@ -0,0 +1,5 @@
|
||||
FROM python:3.11
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN pip install -r requirements.txt
|
||||
ENTRYPOINT ["python", "cli"]
|
23
admiral-worker/Makefile
Normal file
23
admiral-worker/Makefile
Normal file
@ -0,0 +1,23 @@
|
||||
include buildSrc/common.mk
|
||||
|
||||
.PHONY: api app core data tests
|
||||
|
||||
init: ## start virtual environment and install dev. requirements
|
||||
#sudo apt install python3-virtualenv
|
||||
|
||||
rm -fr $(VIRTUAL_ENV)
|
||||
virtualenv -p python3 $(VIRTUAL_ENV)
|
||||
$(MAKE) install
|
||||
|
||||
install: ## install development libs
|
||||
pip install -r requirements.txt
|
||||
|
||||
tests: ## execute test suite
|
||||
python3 -m unittest discover tests "test_*.py"
|
||||
|
||||
coverage: ## create HTML coverage report
|
||||
coverage run --source=app,core --omit=core/services/*,core/repos/* -m unittest discover tests "test_*.py" && coverage html
|
||||
|
||||
export PYTHONPATH=$PYTHONPATH:.:.venv/bin/
|
||||
run_optimization_worker:
|
||||
.venv/bin/python ./cli/run_optimization_worker.py
|
363
admiral-worker/app/App.py
Normal file
363
admiral-worker/app/App.py
Normal file
@ -0,0 +1,363 @@
|
||||
from typing import Optional
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from app.repos.sql.CrnMicroUpdateSqlRepo import CrnMicroUpdateSqlRepo
|
||||
from app.repos.sql.GpsServiceTimeSqlRepo import GpsServiceTimeSqlRepo
|
||||
from app.repos.sql.GpsSqlRepo import GpsSqlRepo
|
||||
from app.repos.sql.GpsStopTimeSqlRepo import GpsStopTimeSqlRepo
|
||||
from app.repos.sql.GpsDeliverySqlRepo import GpsDeliverySqlRepo
|
||||
from app.repos.sql.OptimizationMetricsSqlRepo import OptimizationMetricsSqlRepo
|
||||
from app.services.FsFtpService import FsFtpService
|
||||
from app.services.OsrmRoutingService import OsrmRoutingService
|
||||
from app.services.SolvesallOptimizationService import SolvesallOptimizationService
|
||||
from core.domain.worker.Worker import Worker
|
||||
from core.repos.CrnMicroUpdateRepo import CrnMicroUpdateRepo
|
||||
from core.repos.GpsDeliveryRepo import GpsDeliveryRepo
|
||||
from core.repos.GpsRepo import GpsRepo
|
||||
from core.repos.GpsServiceTimeRepo import GpsServiceTimeRepo
|
||||
from core.repos.GpsStopTimeRepo import GpsStopTimeRepo
|
||||
from core.types.Id import Id
|
||||
from core.usecases.Print_file_hashes import Print_file_hashes
|
||||
from core.usecases.Run_gps_worker import Run_gps_worker
|
||||
from core.usecases.gps_worker.Analyze_delivery_data import Analyze_delivery_data
|
||||
from core.usecases.gps_worker.Analyze_gps_data import Analyze_gps_data
|
||||
from core.usecases.gps_worker.Match_crnPoints_with_allLandPlots import Match_crnPoints_with_allLandPlots
|
||||
from core.usecases.gps_worker.Match_crnPoints_with_landPlots import Match_crnPoints_with_landPlots
|
||||
from core.usecases.gps_worker.Update_service_times import Update_service_times
|
||||
from core.usecases.optimization_worker.Update_optimization_points import Update_optimization_points
|
||||
from core.usecases.updating_worker.Test_transport_matrix import Test_transport_matrix
|
||||
|
||||
load_dotenv()
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
from app.Env import Env
|
||||
from app.repos.sql.GpsSessionSqlRepo import GpsSessionSqlRepo
|
||||
from app.repos.sql.OptimizationResultSqlRepo import OptimizationResultSqlRepo
|
||||
from app.repos.sql.OptimizationSqlRepo import OptimizationSqlRepo
|
||||
from app.repos.sql.OptimizationVehicleSqlRepo import OptimizationVehicleSqlRepo
|
||||
from app.repos.sql.PostOfficeSqlRepo import PostOfficeSqlRepo
|
||||
from app.repos.sql.WorkerJobLogSqlRepo import WorkerJobLogSqlRepo
|
||||
from app.repos.sql.WorkerJobSqlRepo import WorkerJobSqlRepo
|
||||
from app.repos.sql.WorkerJobStatusSqlRepo import WorkerJobStatusSqlRepo
|
||||
from app.repos.sql.WorkerLogSqlRepo import WorkerLogSqlRepo
|
||||
from app.repos.sql.WorkerSqlRepo import WorkerSqlRepo
|
||||
from app.repos.sql.WorkerStatusSqlRepo import WorkerStatusSqlRepo
|
||||
from app.services.EProstorLandService import EProstorLandService
|
||||
from app.services.PostaApiService import PostaApiService
|
||||
from app.services.PsutilSystemService import PsutilSystemService
|
||||
from core.domain.worker.WorkerJob import WorkerJob
|
||||
from core.repos.GpsSessionRepo import GpsSessionRepo
|
||||
from core.repos.OptimizationRepo import OptimizationRepo
|
||||
from core.repos.OptimizationResultRepo import OptimizationResultRepo
|
||||
from core.repos.OptimizationVehicleRepo import OptimizationVehicleRepo
|
||||
from core.repos.PostOfficeRepo import PostOfficeRepo
|
||||
from core.repos.WorkerJobLogRepo import WorkerJobLogRepo
|
||||
from core.repos.WorkerJobRepo import WorkerJobRepo
|
||||
from core.repos.WorkerJobStatusRepo import WorkerJobStatusRepo
|
||||
from core.repos.WorkerLogRepo import WorkerLogRepo
|
||||
from core.repos.WorkerRepo import WorkerRepo
|
||||
from core.repos.WorkerStatusRepo import WorkerStatusRepo
|
||||
from core.services.FtpService import FtpService
|
||||
from core.services.LandService import LandService
|
||||
from core.services.OptimizationService import OptimizationService
|
||||
from core.services.PostaService import PostaService
|
||||
from core.services.RoutingService import RoutingService
|
||||
from core.services.SystemService import SystemService
|
||||
from core.usecases.Run_optimization_worker import Run_optimization_worker
|
||||
from core.usecases.Run_updating_worker import Run_updating_worker
|
||||
from core.usecases.optimization_worker.Read_optimization_files import Read_optimization_files
|
||||
from core.usecases.updating_worker.Calculate_optimization_points import Calculate_optimization_points
|
||||
from core.usecases.updating_worker.Write_optimization_files import Write_optimization_files
|
||||
from core.usecases.initialization.Register_worker import Register_worker
|
||||
from core.usecases.optimization_worker.Run_optimization_job import Run_optimization_job
|
||||
from core.usecases.logging.Log_worker_job_status import Log_worker_job_status
|
||||
from core.usecases.logging.Log_worker_status import Log_worker_status
|
||||
|
||||
|
||||
class App:
|
||||
# REPOS
|
||||
class repos:
|
||||
optimizationResultRepo: OptimizationResultRepo = None
|
||||
optimizationRepo: OptimizationRepo = None
|
||||
optimizationVehicleRepo: OptimizationVehicleRepo = None
|
||||
workerRepo: WorkerRepo = None
|
||||
workerLogRepo: WorkerLogRepo = None
|
||||
workerStatusRepo: WorkerStatusRepo = None
|
||||
workerJobRepo: WorkerJobRepo = None
|
||||
workerJobStatusRepo: WorkerJobStatusRepo = None
|
||||
workerJobLogRepo: WorkerJobLogRepo = None
|
||||
gpsSessionRepo: GpsSessionRepo = None
|
||||
gpsRepo: GpsRepo = None
|
||||
gpsStopTimeRepo: GpsStopTimeRepo = None
|
||||
gpsServiceTimeRepo: GpsServiceTimeRepo = None
|
||||
gpsDeliveryRepo: GpsDeliveryRepo = None
|
||||
postOfficeRepo: PostOfficeRepo = None
|
||||
crnMicroUpdateRepo: CrnMicroUpdateRepo = None
|
||||
|
||||
# SERVICES
|
||||
class services:
|
||||
postaService: PostaService = None
|
||||
systemService: SystemService = None
|
||||
ftpService: FtpService = None
|
||||
routingService: RoutingService = None
|
||||
landService: LandService = None
|
||||
optimizationService: OptimizationService = None
|
||||
|
||||
# USE CASES
|
||||
class usecases:
|
||||
run_gps_worker: Run_gps_worker = None
|
||||
print_file_hashes: Print_file_hashes = None
|
||||
analyze_gps_data: Analyze_gps_data = None
|
||||
analyze_delivery_data: Analyze_delivery_data = None
|
||||
update_service_times: Update_service_times = None
|
||||
match_crnPoints_with_landPlots: Match_crnPoints_with_landPlots = None
|
||||
match_crnPoints_with_allLandPlots: Match_crnPoints_with_allLandPlots = None
|
||||
test_transport_matrix: Test_transport_matrix = None
|
||||
log_worker_job_status: Log_worker_job_status = None
|
||||
log_worker_status: Log_worker_status = None
|
||||
register_worker: Register_worker = None
|
||||
run_optimization_worker: Run_optimization_worker = None
|
||||
run_updating_worker: Run_updating_worker = None
|
||||
write_optimization_files: Write_optimization_files = None
|
||||
calculate_optimization_points: Calculate_optimization_points = None
|
||||
read_optimization_files: Read_optimization_files = None
|
||||
update_optimization_points: Update_optimization_points = None
|
||||
|
||||
@staticmethod
|
||||
def init_log_worker_status(id: Id[Worker]):
|
||||
db = create_engine(Env.DB_URL)
|
||||
workerStatusRepo = WorkerStatusSqlRepo(db=db)
|
||||
workerRepo = WorkerSqlRepo(db=db)
|
||||
systemService = PsutilSystemService()
|
||||
log_worker_status = Log_worker_status(
|
||||
workerRepo=workerRepo,
|
||||
workerStatusRepo=workerStatusRepo,
|
||||
systemService=systemService,
|
||||
)
|
||||
log_worker_status.now(id=id)
|
||||
|
||||
@staticmethod
|
||||
def init_run_worker_optimization_job(workerJob: WorkerJob):
|
||||
db = create_engine(Env.DB_URL)
|
||||
|
||||
# REPOS
|
||||
optimizationRepo = OptimizationSqlRepo(db=db)
|
||||
optimizationResultRepo = OptimizationResultSqlRepo(db=db)
|
||||
optimizationVehicleRepo = OptimizationVehicleSqlRepo(db=db)
|
||||
workerJobStatusRepo = WorkerJobStatusSqlRepo(db=db)
|
||||
workerJobLogRepo = WorkerJobLogSqlRepo(db=db)
|
||||
workerLogRepo = WorkerLogSqlRepo(db=db)
|
||||
postOfficeRepo = PostOfficeSqlRepo(db=db)
|
||||
gpsStopTimeRepo = GpsStopTimeSqlRepo(db=db)
|
||||
gpsServiceTimeRepo = GpsServiceTimeSqlRepo(db=db)
|
||||
optimizationMetricsRepo = OptimizationMetricsSqlRepo(db=db)
|
||||
gpsSessionRepo = GpsSessionSqlRepo(db=db)
|
||||
|
||||
# SERVICES
|
||||
ftpService = FsFtpService()
|
||||
systemService = PsutilSystemService()
|
||||
routingService = OsrmRoutingService(domain=Env.ROUTING_DOMAIN_OSRM)
|
||||
optimizationService = SolvesallOptimizationService()
|
||||
postaService = PostaApiService(
|
||||
useMocks=Env.POSTA_API_USE_MOCKS,
|
||||
authDomain=Env.POSTA_API_AUTH,
|
||||
crnDomain=Env.POSTA_API_CRN,
|
||||
deliveryDomain=Env.POSTA_API_DELIVERY,
|
||||
username=Env.POSTA_API_USERNAME,
|
||||
password=Env.POSTA_API_PASSWORD,
|
||||
authToken=Env.POSTA_API_AUTH_TOKEN,
|
||||
crnToken=Env.POSTA_API_CRN_TOKEN,
|
||||
deliveryToken=Env.POSTA_API_DELIVERY_TOKEN,
|
||||
)
|
||||
|
||||
|
||||
# USE CASES
|
||||
log_worker_job_status = Log_worker_job_status(
|
||||
workerJobStatusRepo=workerJobStatusRepo,
|
||||
systemService=systemService,
|
||||
)
|
||||
|
||||
update_service_times = Update_service_times(
|
||||
gpsStopTimeRepo=gpsStopTimeRepo,
|
||||
gpsServiceTimeRepo=gpsServiceTimeRepo,
|
||||
postaService=postaService,
|
||||
gpsSessionRepo=gpsSessionRepo,
|
||||
)
|
||||
|
||||
read_optimization_files = Read_optimization_files(ftpService=ftpService)
|
||||
|
||||
update_optimization_points = Update_optimization_points(
|
||||
gpsStopTimeRepo=gpsStopTimeRepo,
|
||||
gpsServiceTimeRepo=gpsServiceTimeRepo,
|
||||
update_service_times=update_service_times
|
||||
)
|
||||
|
||||
run_worker_optimization_job = Run_optimization_job(
|
||||
optimizationMetricsRepo=optimizationMetricsRepo,
|
||||
systemService=systemService,
|
||||
workerLogRepo=workerLogRepo,
|
||||
workerJobLogRepo=workerJobLogRepo,
|
||||
log_worker_job_status=log_worker_job_status,
|
||||
optimizationVehicleRepo=optimizationVehicleRepo,
|
||||
optimizationRepo=optimizationRepo,
|
||||
postOfficeRepo=postOfficeRepo,
|
||||
read_optimization_files=read_optimization_files,
|
||||
optimizationResultRepo=optimizationResultRepo,
|
||||
optimizationService=optimizationService,
|
||||
routingService=routingService,
|
||||
update_optimization_points=update_optimization_points
|
||||
)
|
||||
|
||||
run_worker_optimization_job.now(workerJob=workerJob)
|
||||
|
||||
@staticmethod
|
||||
def init(maxCrnPoints: Optional[int] = None):
|
||||
db = create_engine(Env.DB_URL)
|
||||
|
||||
# REPOS
|
||||
App.repos.optimizationRepo = OptimizationSqlRepo(db=db)
|
||||
App.repos.optimizationResultRepo = OptimizationResultSqlRepo(db=db)
|
||||
App.repos.optimizationVehicleRepo = OptimizationVehicleSqlRepo(db=db)
|
||||
App.repos.workerRepo = WorkerSqlRepo(db=db)
|
||||
App.repos.workerLogRepo = WorkerLogSqlRepo(db=db)
|
||||
App.repos.workerStatusRepo = WorkerStatusSqlRepo(db=db)
|
||||
App.repos.workerJobRepo = WorkerJobSqlRepo(db=db)
|
||||
App.repos.workerJobStatusRepo = WorkerJobStatusSqlRepo(db=db)
|
||||
App.repos.workerJobLogRepo = WorkerJobLogSqlRepo(db=db)
|
||||
App.repos.gpsSessionRepo = GpsSessionSqlRepo(db=db)
|
||||
App.repos.gpsRepo = GpsSqlRepo(db=db)
|
||||
App.repos.gpsStopTimeRepo = GpsStopTimeSqlRepo(db=db)
|
||||
App.repos.gpsServiceTimeRepo = GpsServiceTimeSqlRepo(db=db)
|
||||
App.repos.postOfficeRepo = PostOfficeSqlRepo(db=db)
|
||||
App.repos.crnMicroUpdateRepo = CrnMicroUpdateSqlRepo(db=db)
|
||||
App.repos.gpsDeliveryRepo = GpsDeliverySqlRepo(db=db)
|
||||
|
||||
# SERVICES
|
||||
App.services.routingService = OsrmRoutingService(domain=Env.ROUTING_DOMAIN_OSRM)
|
||||
App.services.systemService = PsutilSystemService()
|
||||
App.services.optimizationService = SolvesallOptimizationService()
|
||||
App.services.landService = EProstorLandService()
|
||||
App.services.postaService = PostaApiService(
|
||||
useMocks=Env.POSTA_API_USE_MOCKS,
|
||||
authDomain=Env.POSTA_API_AUTH,
|
||||
crnDomain=Env.POSTA_API_CRN,
|
||||
deliveryDomain=Env.POSTA_API_DELIVERY,
|
||||
username=Env.POSTA_API_USERNAME,
|
||||
password=Env.POSTA_API_PASSWORD,
|
||||
authToken=Env.POSTA_API_AUTH_TOKEN,
|
||||
crnToken=Env.POSTA_API_CRN_TOKEN,
|
||||
maxCrnPoints=maxCrnPoints,
|
||||
deliveryToken=Env.POSTA_API_DELIVERY_TOKEN,
|
||||
)
|
||||
App.services.ftpService = FsFtpService()
|
||||
|
||||
# USE CASES
|
||||
App.usecases.print_file_hashes = Print_file_hashes()
|
||||
|
||||
App.usecases.log_worker_job_status = Log_worker_job_status(
|
||||
workerJobStatusRepo=App.repos.workerJobStatusRepo,
|
||||
systemService=App.services.systemService,
|
||||
)
|
||||
|
||||
App.usecases.update_optimization_points = Update_optimization_points(
|
||||
gpsStopTimeRepo=App.repos.gpsStopTimeRepo,
|
||||
gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo,
|
||||
update_service_times=App.usecases.update_service_times
|
||||
)
|
||||
|
||||
App.usecases.log_worker_status = Log_worker_status(
|
||||
workerRepo=App.repos.workerRepo,
|
||||
workerStatusRepo=App.repos.workerStatusRepo,
|
||||
systemService=App.services.systemService
|
||||
)
|
||||
App.usecases.register_worker = Register_worker(
|
||||
workerRepo=App.repos.workerRepo,
|
||||
workerStatusRepo=App.repos.workerStatusRepo,
|
||||
workerLogRepo=App.repos.workerLogRepo,
|
||||
systemService=App.services.systemService,
|
||||
)
|
||||
App.usecases.run_optimization_worker = Run_optimization_worker(
|
||||
register_worker=App.usecases.register_worker,
|
||||
workerLogRepo=App.repos.workerLogRepo,
|
||||
optimizationRepo=App.repos.optimizationRepo,
|
||||
workerJobRepo=App.repos.workerJobRepo,
|
||||
init_run_optimization_job=App.usecases.init_run_worker_optimization_job,
|
||||
init_log_worker_status=App.usecases.init_log_worker_status,
|
||||
systemService=App.services.systemService,
|
||||
workerRepo=App.repos.workerRepo
|
||||
)
|
||||
|
||||
App.usecases.read_optimization_files = Read_optimization_files(ftpService=App.services.ftpService)
|
||||
App.usecases.calculate_optimization_points = Calculate_optimization_points(
|
||||
gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo,
|
||||
crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo,
|
||||
)
|
||||
App.usecases.test_transport_matrix = Test_transport_matrix(routingService=App.services.routingService)
|
||||
App.usecases.match_crnPoints_with_landPlots = Match_crnPoints_with_landPlots(
|
||||
landService=App.services.landService,
|
||||
postaService=App.services.postaService,
|
||||
)
|
||||
App.usecases.match_crnPoints_with_allLandPlots = Match_crnPoints_with_allLandPlots(
|
||||
landService=App.services.landService,
|
||||
postaService=App.services.postaService,
|
||||
)
|
||||
App.usecases.write_optimization_files = Write_optimization_files(
|
||||
postOfficeRepo=App.repos.postOfficeRepo,
|
||||
crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo,
|
||||
postaService=App.services.postaService,
|
||||
ftpService=App.services.ftpService,
|
||||
routingService=App.services.routingService,
|
||||
calculate_optimization_points=App.usecases.calculate_optimization_points,
|
||||
match_crnPoints_with_landPlots=App.usecases.match_crnPoints_with_landPlots
|
||||
)
|
||||
App.usecases.run_updating_worker = Run_updating_worker(
|
||||
register_worker=App.usecases.register_worker,
|
||||
workerLogRepo=App.repos.workerLogRepo,
|
||||
postOfficeRepo=App.repos.postOfficeRepo,
|
||||
write_optimization_files=App.usecases.write_optimization_files,
|
||||
gpsSessionRepo=App.repos.gpsSessionRepo,
|
||||
init_log_worker_status=App.usecases.init_log_worker_status,
|
||||
ftpService=App.services.ftpService,
|
||||
)
|
||||
|
||||
App.usecases.analyze_gps_data = Analyze_gps_data(gpsRepo=App.repos.gpsRepo, postaService=App.services.postaService)
|
||||
App.usecases.analyze_delivery_data = Analyze_delivery_data(postaService=App.services.postaService)
|
||||
|
||||
App.usecases.update_service_times = Update_service_times(
|
||||
gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo,
|
||||
gpsStopTimeRepo=App.repos.gpsStopTimeRepo,
|
||||
postaService=App.services.postaService,
|
||||
gpsSessionRepo=App.repos.gpsSessionRepo
|
||||
)
|
||||
|
||||
App.usecases.run_gps_worker = Run_gps_worker(
|
||||
init_log_worker_status=App.usecases.init_log_worker_status,
|
||||
register_worker=App.usecases.register_worker,
|
||||
workerLogRepo=App.repos.workerLogRepo,
|
||||
analyze_gps_data=App.usecases.analyze_gps_data,
|
||||
gpsSessionRepo=App.repos.gpsSessionRepo,
|
||||
gpsStopTimeRepo=App.repos.gpsStopTimeRepo,
|
||||
crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo,
|
||||
update_service_times=App.usecases.update_service_times,
|
||||
analyze_delivery_data=App.usecases.analyze_delivery_data,
|
||||
gpsDeliveryRepo=App.repos.gpsDeliveryRepo,
|
||||
gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo,
|
||||
match_crn_points_with_land_plots=App.usecases.match_crnPoints_with_landPlots,
|
||||
postaService=App.services.postaService
|
||||
)
|
||||
|
||||
App.__check()
|
||||
|
||||
@staticmethod
|
||||
def __check():
|
||||
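# fail fast if any repo, service, or use case was left unwired by init()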
not_inited = []
|
||||
for group in [App.repos, App.services, App.usecases]:
|
||||
for key, value in group.__dict__.items():
|
||||
if not str(key).startswith("_") and key not in ['init']:
|
||||
if value is None:
|
||||
not_inited.append(key)
|
||||
|
||||
if len(not_inited) > 0:
|
||||
raise Exception(f"App dependencies not inited: {not_inited}")
|
22
admiral-worker/app/Env.py
Normal file
22
admiral-worker/app/Env.py
Normal file
@ -0,0 +1,22 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
class Env:
|
||||
|
||||
DB_URL: str = os.getenv("DB_URL")
|
||||
|
||||
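# expects "0" or "1"; any other value (or an unset variable) raises at import time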
POSTA_API_USE_MOCKS: bool = bool(int(os.getenv("POSTA_API_USE_MOCKS")))
|
||||
POSTA_API_AUTH: str = os.getenv("POSTA_API_AUTH")
|
||||
POSTA_API_CRN: str = os.getenv("POSTA_API_CRN")
|
||||
POSTA_API_DELIVERY: str = os.getenv("POSTA_API_DELIVERY")
|
||||
POSTA_API_USERNAME: str = os.getenv("POSTA_API_USERNAME")
|
||||
POSTA_API_PASSWORD: str = os.getenv("POSTA_API_PASSWORD")
|
||||
POSTA_API_AUTH_TOKEN: str = os.getenv("POSTA_API_AUTH_TOKEN")
|
||||
POSTA_API_CRN_TOKEN: str = os.getenv("POSTA_API_CRN_TOKEN")
|
||||
POSTA_API_DELIVERY_TOKEN: str = os.getenv("POSTA_API_DELIVERY_TOKEN")
|
||||
|
||||
ROUTING_DOMAIN_OSRM: str = os.getenv("ROUTING_DOMAIN_OSRM")
|
260
admiral-worker/app/algorithms/OrToolsOptimizationService.py
Normal file
260
admiral-worker/app/algorithms/OrToolsOptimizationService.py
Normal file
@ -0,0 +1,260 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Literal, Optional
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from pydantic import BaseModel, PositiveInt, PositiveFloat, NonNegativeInt, model_validator, ConfigDict
|
||||
from pydantic.alias_generators import to_camel
|
||||
from typing_extensions import Self
|
||||
from typing_extensions import override
|
||||
|
||||
from app.algorithms import solver_or
|
||||
from core.types.Logger import Logger
|
||||
|
||||
|
||||
class BaseSchema(BaseModel):
|
||||
model_config = ConfigDict(
|
||||
alias_generator=to_camel, populate_by_name=True, extra="allow"
|
||||
)
|
||||
|
||||
|
||||
class OrToolsOptimizationVehicle(BaseSchema):
|
||||
id: NonNegativeInt
|
||||
name: str
|
||||
|
||||
route_type: str
|
||||
capacity: PositiveInt
|
||||
range_km: PositiveFloat
|
||||
working_time_h: Optional[PositiveFloat] = 8.0
|
||||
districts: list[str]
|
||||
priority: Optional[bool] = False
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_values(self) -> Self:
|
||||
# assert 0 < self.range_km <= 1_000, f"Range should be between 0 and 1000 km."
|
||||
# assert 0 < self.capacity <= 1_000
|
||||
# assert 0 < self.working_time_h <= 10, f"Max working time is 10h."
|
||||
return self
|
||||
|
||||
|
||||
class OrToolsOptimizationPoint(BaseSchema):
|
||||
id: NonNegativeInt
|
||||
hisa_id: str
|
||||
|
||||
service_time_sec: NonNegativeInt
|
||||
demand: Optional[NonNegativeInt] = 1
|
||||
freq: Optional[float] = 1.0
|
||||
type: Literal['crn', 'depot', 'refill']
|
||||
|
||||
lat: float
|
||||
lon: float
|
||||
|
||||
district: Optional[str] = None
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_values(self) -> Self:
|
||||
# TODO: assert 0 <= self.service_time_sec <= 1200, f"Service time too large: {self.service_time_sec}."
|
||||
assert 0 <= self.demand < 1_000, f"Demand too large {self.demand}"
|
||||
assert 0 <= self.freq <= 1, f"Frequency not between 0 and 1.0 {self.freq}"
|
||||
assert self.type != 'depot' or self.district is None, "Depot can't have an assigned district."
|
||||
assert 45 <= self.lat <= 47 and 13 <= self.lon <= 17, f"Invalid coordinates {self.lat}, {self.lon}"
|
||||
|
||||
return self
|
||||
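A minimal sketch of how the camelCase alias generator inherited from `BaseSchema` behaves when validating a point (values are illustrative; the module path is the one added in this commit):

```python
# Illustrative only: fields can be populated via their camelCase aliases
# (alias_generator=to_camel) or via their Python names (populate_by_name=True).
from app.algorithms.OrToolsOptimizationService import OrToolsOptimizationPoint

point = OrToolsOptimizationPoint.model_validate({
    "id": 1, "hisaId": "H-1", "serviceTimeSec": 30,
    "type": "crn", "lat": 46.05, "lon": 14.50,
})
assert point.hisa_id == "H-1" and point.demand == 1  # demand falls back to its default
```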
|
||||
|
||||
def to_np_array_int(df, col):
|
||||
df = df.sort_values('start_hisa')
|
||||
n = len(df['start_hisa'].unique())
|
||||
dm = np.full((n, n), 10 ** 9, dtype=int)
|
||||
dm[df['start_index'], df['end_index']] = df[col]
|
||||
return dm
|
||||
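A short sketch of what `to_np_array_int` does with a long-format distance table, assuming the function above is in scope (all numbers are made up):

```python
# Illustrative only: pivot a long-format (start, end, value) table into a dense
# square matrix; pairs missing from the table keep the 10**9 "unreachable" placeholder.
import pandas as pd

df = pd.DataFrame({
    "start_hisa": ["A", "A", "B", "B"],
    "start_index": [0, 0, 1, 1],
    "end_index": [0, 1, 0, 1],
    "distance": [0, 120, 130, 0],
})
dm = to_np_array_int(df, "distance")
assert dm.shape == (2, 2) and dm[0, 1] == 120
```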
|
||||
|
||||
class OrToolsOptimizationInstance(BaseSchema):
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
class JsonEncoder(json.JSONEncoder):
|
||||
def default(self, obj):
|
||||
if isinstance(obj, OrToolsOptimizationInstance):
|
||||
return obj.model_dump()
|
||||
if isinstance(obj, np.ndarray):
|
||||
cells = {}
|
||||
for y, line in enumerate(obj.tolist()):
|
||||
for x, ele in enumerate(line):
|
||||
all[f"{y}_{x}"] = ele
|
||||
return cells
|
||||
if isinstance(obj, dict):
|
||||
return str(obj)
|
||||
return super().default(obj)
|
||||
|
||||
vehicles: list[OrToolsOptimizationVehicle]
|
||||
points: list[OrToolsOptimizationPoint]
|
||||
distance_matrix: dict[str, np.ndarray]
|
||||
time_matrix: dict[str, np.ndarray]
|
||||
initial_routes: Optional[list[list[NonNegativeInt]]]
|
||||
district_percentage: Optional[float] = 0.0
|
||||
log: Logger
|
||||
|
||||
# time_dist_data: dict[Literal['bike', 'car', 'foot'], pd.DataFrame]
|
||||
|
||||
# @computed_field(return_type=dict[Literal['bike', 'car', 'foot'], np.ndarray])
|
||||
# @cached_property
|
||||
# def distance_matrix(self):
|
||||
# return {key: to_np_array_int(df, 'distance') for key, df in self.time_dist_data.items()}
|
||||
|
||||
# @computed_field(return_type=dict[Literal['bike', 'car', 'foot'], np.ndarray])
|
||||
# @cached_property
|
||||
# def time_matrix(self):
|
||||
# return {key: to_np_array_int(df, 'duration') for key, df in self.time_dist_data.items()}
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_values(self) -> Self:
|
||||
availableCapacity = sum([o.capacity for o in self.vehicles])
|
||||
requiredCapacity = sum([o.demand for o in self.points])
|
||||
assert availableCapacity >= requiredCapacity, f"Available capacity '{availableCapacity}' is less than required capacity '{requiredCapacity}'"
|
||||
|
||||
if self.district_percentage is not None:
|
||||
assert 0.0 <= self.district_percentage <= 1.0, f"District percentage has to be between 0 and 1, got {self.district_percentage}."
|
||||
|
||||
# for k, df in self.time_dist_data.items():
|
||||
# assert set(df['start_index']) == set(df['end_index']), "Sources and destinations should be the same."
|
||||
|
||||
for k, v in self.distance_matrix.items():
|
||||
assert len(self.points) == v.shape[0], f"Number of points ({len(self.points)}) should be the same as distance_matrix size ({v.shape[0]})"
|
||||
assert v.shape[0] == v.shape[1], "Both dimensions of distance_matrix should be of equal size"
|
||||
assert all(np.array(sorted([x.id for x in self.points])) == np.arange(v.shape[0])), "Point.id should be its index in distance_matrix."
|
||||
|
||||
for k, v in self.time_matrix.items():
|
||||
assert len(self.points) == v.shape[0], "Number of points should be same as time_matrix size"
|
||||
assert v.shape[0] == v.shape[1], "Both dimensions of time_matrix should be of equal size"
|
||||
|
||||
assert all(np.issubdtype(v.dtype, np.floating) for v in self.distance_matrix.values()), "Distance matrix values should be of a floating dtype"
|
||||
assert all(np.issubdtype(v.dtype, np.floating) for v in self.time_matrix.values()), "Time matrix values should be of a floating dtype"
|
||||
|
||||
for k, v in self.distance_matrix.items():
|
||||
# assert v.max() <= 100_000, f"Some values in distance_matrix '{k}' are larger than 100 km."
|
||||
# assert v.mean() >= 1_000, f"Mean of values in distance_matrix '{k}' is smaller than 1000 m. Check why are values so big!"
|
||||
if v.max() > 100_000:
|
||||
self.log.warning(f"Some values in distance_matrix '{k}' are to big: {v.max()}")
|
||||
if v.mean() < 1_000:
|
||||
self.log.warning(f"Mean of values in distance_matrix '{k}' are to big: {v.mean()}")
|
||||
|
||||
# TODO: check matrix
|
||||
return self
|
||||
|
||||
|
||||
class OrToolsOptimizationSolution(BaseSchema):
|
||||
vehicle_id: NonNegativeInt
|
||||
dummy: bool
|
||||
hisa_ids: list[str]
|
||||
distance: NonNegativeInt
|
||||
duration: timedelta
|
||||
cost: NonNegativeInt
|
||||
district: Optional[str] = None # TODO: solver_or needs to assign district names when doing exact optimization!!!
|
||||
|
||||
|
||||
class OrToolsOptimizationConfig(BaseSchema):
|
||||
objective: Literal['distance', 'time'] = 'time'
|
||||
vehicle_cost: Optional[int] = solver_or.VEHICLE_COST
|
||||
|
||||
district_penalty: NonNegativeInt = 0
|
||||
district_mode: Literal['soft', 'single', 'subsets', 'hard'] = 'soft'
|
||||
|
||||
set_initial: bool = False
|
||||
|
||||
useDistrictCentrality: bool = True
|
||||
|
||||
|
||||
class OrToolsOptimizationService:
|
||||
"""
|
||||
Main service class for running the OR-Tools VRP optimization.
|
||||
"""
|
||||
|
||||
@override
|
||||
def vrpOptimization(
|
||||
self,
|
||||
solving_time_sec: int,
|
||||
instance: OrToolsOptimizationInstance,
|
||||
config: OrToolsOptimizationConfig,
|
||||
log: Logger,
|
||||
solution_callback_fn=lambda objective, raw_solution, overlapping: None,
|
||||
stop_callback_fn=lambda: False,
|
||||
) -> tuple[int, list[OrToolsOptimizationSolution], dict[int, float]]:
|
||||
|
||||
log.info("Mapping optimizationVehicles")
|
||||
opta_vehicles = pd.DataFrame([x.__dict__ for x in instance.vehicles])
|
||||
opta_vehicles['cost'] = config.vehicle_cost
|
||||
opta_vehicles.loc[opta_vehicles['priority'], 'cost'] = solver_or.VEHICLE_PRIORITY_COST
|
||||
opta_vehicles['max_time'] = (opta_vehicles['working_time_h'] * 3600).astype(int)
|
||||
opta_vehicles['range'] = (opta_vehicles['range_km'] * 1000).astype(int)
|
||||
|
||||
if solver_or.VEHICLE_DUPLICATE_FACTOR > 1:
|
||||
vn = len(opta_vehicles)
|
||||
opta_vehicles = pd.concat([opta_vehicles] * solver_or.VEHICLE_DUPLICATE_FACTOR).reset_index(drop=True)
|
||||
opta_vehicles.loc[opta_vehicles.index[vn:], 'cost'] = solver_or.VEHICLE_DUPLICATE_COST
|
||||
log.info("Mapping optimization points")
|
||||
opta_points = pd.DataFrame([x.__dict__ for x in instance.points])
|
||||
opta_points['service_time'] = opta_points['service_time_sec']
|
||||
opta_points['base_point'] = np.arange(len(opta_points))
|
||||
|
||||
opta_instance = solver_or.VrpInstance(opta_vehicles, opta_points, instance.distance_matrix, instance.time_matrix, instance.initial_routes,
|
||||
instance.district_percentage)
|
||||
|
||||
def calculate_overlapping(solution, instance) -> Optional[dict[int, float]]:
|
||||
if not config.set_initial:
|
||||
return None
|
||||
overlapping: dict[int, float] = {}
|
||||
for id, vehicle, type, route, total_distance, total_time, total_cost, num_points, orig_id, dummy in solution.to_records(index=False):
|
||||
if len(instance.initial_routes) == orig_id:
|
||||
break
|
||||
initial_route = set(instance.initial_routes[orig_id])
|
||||
route = set(route)
|
||||
addedPoints = route - initial_route
|
||||
if len(initial_route) > 0:
|
||||
overlapping[int(vehicle)] = round(100 * len(addedPoints) / len(initial_route), 3)
|
||||
return overlapping
|
||||
|
||||
def map_raw_solution(objective: int, solution: pd.DataFrame) -> tuple[int, list[OrToolsOptimizationSolution], dict[int, float]]:
|
||||
solution = solution[solution['total_distance'] > 0].copy()
|
||||
|
||||
solution['orig_id'] = solution['vehicle'].apply(lambda x: x % len(instance.vehicles))
|
||||
solution = solution.reset_index()
|
||||
solution['dummy'] = solution['orig_id'].duplicated()
|
||||
|
||||
readings = [kwargs for kwargs in solution.to_dict(orient='records')]
|
||||
|
||||
optimizationSolutions = []
|
||||
id2point = {x.id: x for x in instance.points}
|
||||
|
||||
for reading in readings:
|
||||
hisa_ids = [id2point[i].hisa_id for i in reading['route']]
|
||||
vehicle_id = reading['orig_id']
|
||||
dummy = reading['dummy']
|
||||
|
||||
optimizationSolutions.append(
|
||||
OrToolsOptimizationSolution(
|
||||
vehicle_id=vehicle_id,
|
||||
dummy=dummy,
|
||||
hisa_ids=hisa_ids,
|
||||
distance=reading['total_distance'],
|
||||
duration=timedelta(seconds=reading['total_time']),
|
||||
cost=reading['total_cost']
|
||||
)
|
||||
)
|
||||
|
||||
return objective, optimizationSolutions, calculate_overlapping(solution=solution, instance=instance)
|
||||
|
||||
log.info(f"Solving VRP with points (without depot): {len(opta_instance.nodes) - 1}")
|
||||
objective, solution = solver_or.solve(
|
||||
opta_instance, config, solving_time_sec,
|
||||
solution_callback_fn=lambda objec, raw_solution: solution_callback_fn(*map_raw_solution(objective=objec, solution=raw_solution)),
|
||||
stop_callback_fn=stop_callback_fn,
|
||||
log=log
|
||||
)
|
||||
|
||||
obj, sol, overlap = map_raw_solution(objective=objective, solution=solution)
|
||||
log.info(f"VRP solved with points (without depot): {sum([len(s.hisa_ids) - 1 for s in sol])}")
|
||||
return obj, sol, overlap
|
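A hypothetical end-to-end sketch of calling the service (all names, coordinates and matrices are made up; a standard `logging.Logger` is assumed to satisfy the `core.types.Logger` interface, and the depot must be the point with id 0, as `solver_or` requires):

```python
# Illustrative only; not taken from the repository.
import logging
import numpy as np
from app.algorithms.OrToolsOptimizationService import (
    OrToolsOptimizationConfig, OrToolsOptimizationInstance, OrToolsOptimizationPoint,
    OrToolsOptimizationService, OrToolsOptimizationVehicle,
)

log = logging.getLogger("vrp-sketch")
vehicles = [OrToolsOptimizationVehicle(id=0, name="Kolo", route_type="bike",
                                       capacity=100, range_km=30.0, districts=[])]
points = [
    OrToolsOptimizationPoint(id=0, hisa_id="DEPOT", service_time_sec=0, demand=0,
                             type="depot", lat=46.05, lon=14.50),
    OrToolsOptimizationPoint(id=1, hisa_id="H-1", service_time_sec=30,
                             type="crn", lat=46.06, lon=14.51, district="D1"),
]
matrix = np.array([[0.0, 800.0], [800.0, 0.0]])  # floating dtype, as the validator expects
instance = OrToolsOptimizationInstance(
    vehicles=vehicles, points=points,
    distance_matrix={"bike": matrix}, time_matrix={"bike": matrix},
    initial_routes=None, district_percentage=0.0, log=log,
)
objective, solutions, overlapping = OrToolsOptimizationService().vrpOptimization(
    solving_time_sec=10, instance=instance, config=OrToolsOptimizationConfig(), log=log,
)
```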
554
admiral-worker/app/algorithms/solver_or.py
Normal file
@ -0,0 +1,554 @@
|
||||
import math
|
||||
import sys
|
||||
import weakref
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from threading import Timer
|
||||
from typing import Callable
|
||||
|
||||
import pandas as pd
|
||||
from ortools.constraint_solver import pywrapcp, routing_enums_pb2
|
||||
from pandas import DataFrame
|
||||
|
||||
from core.types.Logger import Logger
|
||||
|
||||
VEHICLE_COST = 16 * 3600 # Two working days.
|
||||
VEHICLE_PRIORITY_COST = 0 # Vehicle with priority has zero cost.
|
||||
VEHICLE_DUPLICATE_COST = 100_000_000
|
||||
VEHICLE_DUPLICATE_FACTOR = 2
|
||||
"""
|
||||
id name route_type ... cost max_time range
|
||||
0 0 Kolo z pomožnim motorjem kpm ... 57600 3600 60000
|
||||
1 1 Motorno kolo mk ... 0 3600 120000
|
||||
2 2 Kolo z motorjem km ... 57600 3600 120000
|
||||
3 3 Kolo bike ... 57600 3600 30000
|
||||
4 4 Elektricni tro/štiri kolesnik ev ... 57600 3600 120000
|
||||
5 5 Pes foot ... 57600 3600 6000
|
||||
6 6 Avtomobil car ... 57600 3600 150000
|
||||
7 0 Kolo z pomožnim motorjem kpm ... 100000000 3600 60000
|
||||
8 1 Motorno kolo mk ... 100000000 3600 120000
|
||||
9 2 Kolo z motorjem km ... 100000000 3600 120000
|
||||
10 3 Kolo bike ... 100000000 3600 30000
|
||||
11 4 Elektricni tro/štiri kolesnik ev ... 100000000 3600 120000
|
||||
12 5 Pes foot ... 100000000 3600 6000
|
||||
13 6 Avtomobil car ... 100000000 3600 150000
|
||||
"""
|
||||
|
||||
|
||||
@dataclass
|
||||
class VrpInstance:
|
||||
"""
|
||||
Main "Instance" of the data to optimize
|
||||
"""
|
||||
vehicles: pd.DataFrame
|
||||
nodes: pd.DataFrame
|
||||
dist: dict
|
||||
time: dict
|
||||
initial_routes: list[list[int]]
|
||||
district_percentage: float
|
||||
|
||||
def read_solution(
|
||||
manager: pywrapcp.RoutingIndexManager,
|
||||
routing: pywrapcp.RoutingModel,
|
||||
instance: VrpInstance,
|
||||
distance_evaluators: dict[callable],
|
||||
time_evaluators: dict[callable],
|
||||
):
|
||||
routes = []
|
||||
|
||||
for vehicle_id, route_type in enumerate(instance.vehicles["route_type"]):
|
||||
distance_evaluator = distance_evaluators[route_type]
|
||||
time_evaluator = time_evaluators[route_type]
|
||||
|
||||
points = []
|
||||
route_distance = 0
|
||||
route_time = 0
|
||||
route_cost = 0
|
||||
|
||||
index = routing.Start(vehicle_id)
|
||||
while not routing.IsEnd(index):
|
||||
previous_index = index
|
||||
index = routing.NextVar(index).Value()
|
||||
|
||||
route_distance += distance_evaluator(previous_index, index)
|
||||
route_time += time_evaluator(previous_index, index)
|
||||
route_cost += routing.GetArcCostForVehicle(previous_index, index, vehicle_id)
|
||||
|
||||
node = manager.IndexToNode(index)
|
||||
point = instance.nodes.base_point.iloc[node]
|
||||
points.append(point)
|
||||
|
||||
routes.append(
|
||||
{
|
||||
"vehicle": vehicle_id,
|
||||
"type": instance.vehicles.iloc[vehicle_id]["route_type"],
|
||||
"route": points,
|
||||
"total_distance": route_distance,
|
||||
"total_time": route_time,
|
||||
"total_cost": route_cost,
|
||||
"num_points": len(points),
|
||||
}
|
||||
)
|
||||
|
||||
routes = pd.DataFrame(routes)
|
||||
return routes
|
||||
|
||||
|
||||
class RepeatTimer(Timer):
|
||||
def run(self):
|
||||
while not self.finished.wait(self.interval):
|
||||
self.function()
|
||||
|
||||
|
||||
class SolutionCallback:
|
||||
def __init__(
|
||||
self,
|
||||
manager: pywrapcp.RoutingIndexManager,
|
||||
model: pywrapcp.RoutingModel,
|
||||
instance: VrpInstance,
|
||||
distance_evaluators: dict[callable],
|
||||
time_evaluators: dict[callable],
|
||||
solution_callback_fn: Callable[[int, pd.DataFrame], None],
|
||||
stop_callback_fn: callable
|
||||
):
|
||||
self._routing_manager_ref = weakref.ref(manager)
|
||||
self._routing_model_ref = weakref.ref(model)
|
||||
self.objectives = []
|
||||
|
||||
self.instance = instance
|
||||
self.distance_evaluators = distance_evaluators
|
||||
self.time_evaluators = time_evaluators
|
||||
|
||||
self.best_routes = None
|
||||
|
||||
self.solution_callback_fn = solution_callback_fn
|
||||
self.stop_callback_fn = stop_callback_fn
|
||||
|
||||
self._timer = RepeatTimer(10, self._check_terminated)
|
||||
self._timer.start()
|
||||
|
||||
def __call__(self):
|
||||
# current objective value
|
||||
objective = int(self._routing_model_ref().CostVar().Value())
|
||||
if not self.objectives or objective < self.objectives[-1]:
|
||||
self.objectives.append(objective)
|
||||
|
||||
self.best_routes = read_solution(
|
||||
self._routing_manager_ref(), self._routing_model_ref(), self.instance, self.distance_evaluators, self.time_evaluators
|
||||
)
|
||||
|
||||
tmp = self.best_routes
|
||||
tmp = tmp[tmp["num_points"] > 2]
|
||||
|
||||
vpd = defaultdict(set)
|
||||
districts = self.instance.nodes['district'].values
|
||||
for _, row in tmp.iterrows():
|
||||
for p in row['route']:
|
||||
vpd[districts[p]].add(row['vehicle'])
|
||||
|
||||
self.solution_callback_fn(objective, self.best_routes)
|
||||
|
||||
# Num. clean districts: {sum(len(s) == 1 for s in vpd.values())} / {len(vpd.keys())} ")
|
||||
# log.info(f"Objective: {objective} Num. vehicles: {len(tmp)}")
|
||||
|
||||
# self._routing_model_ref().solver().FinishCurrentSearch()
|
||||
|
||||
def _check_terminated(self):
|
||||
"""
|
||||
if self.stop_callback_fn(None):
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
TypeError: SolvesallOptimizationService.vrpOptimization.<locals>.stop_callback_fn() takes 0 positional arguments but 1 was given
|
||||
"""
|
||||
if self.stop_callback_fn():
|
||||
self._timer.cancel()
|
||||
self._routing_model_ref().solver().FinishCurrentSearch()
|
||||
|
||||
|
||||
def solve(instance: VrpInstance, config, time_limit_sec, solution_callback_fn: Callable[[int, pd.DataFrame], None], stop_callback_fn, log: Logger, log_search=False):
|
||||
# with open(f"solve_args_{datetime.now().isoformat()}.pkl", "wb") as f:
|
||||
# pickle.dump((instance, config), f)
|
||||
sys.stdout.flush()
|
||||
assert config.objective in ['distance', 'time']
|
||||
assert instance.nodes.iloc[0]["type"] == "depot", "Depot is expected to be at 0"
|
||||
|
||||
manager = pywrapcp.RoutingIndexManager(
|
||||
len(instance.nodes), len(instance.vehicles), 0
|
||||
)
|
||||
routing = pywrapcp.RoutingModel(manager)
|
||||
|
||||
def create_distance_evaluator(route_type, instance):
|
||||
dist_mat = instance.dist[route_type]
|
||||
base_point = instance.nodes["base_point"].values
|
||||
freq = instance.nodes['freq'].values
|
||||
|
||||
def distance_evaluator(from_node, to_node):
|
||||
dst_node = manager.IndexToNode(to_node)
|
||||
src = base_point[manager.IndexToNode(from_node)]
|
||||
dst = base_point[manager.IndexToNode(to_node)]
|
||||
return round(dist_mat[src, dst])
|
||||
|
||||
return distance_evaluator
|
||||
|
||||
distance_evaluators, distance_evaluators_index = {}, {}
|
||||
for route_type in instance.vehicles["route_type"].unique():
|
||||
distance_evaluators[route_type] = create_distance_evaluator(route_type, instance)
|
||||
distance_evaluators_index[route_type] = routing.RegisterTransitCallback(
|
||||
distance_evaluators[route_type]
|
||||
)
|
||||
|
||||
def create_time_evaluator(route_type, instance):
|
||||
dist_mat = instance.dist[route_type]
|
||||
time_mat = instance.time[route_type]
|
||||
|
||||
base_point = instance.nodes["base_point"].values
|
||||
service_time = instance.nodes["service_time"].values
|
||||
freq = instance.nodes['freq'].values
|
||||
hisa_ids = instance.nodes['hisa_id'].values
|
||||
|
||||
def time_evaluator(from_node, to_node):
|
||||
src_node = manager.IndexToNode(from_node)
|
||||
dst_node = manager.IndexToNode(to_node)
|
||||
src = base_point[manager.IndexToNode(from_node)]
|
||||
dst = base_point[manager.IndexToNode(to_node)]
|
||||
src_hisa_id = hisa_ids[src]
|
||||
dst_hisa_id = hisa_ids[dst]
|
||||
|
||||
# THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!!
|
||||
time = round(time_mat[src, dst] + freq[src_node] * service_time[src_node])
|
||||
# log.info(f"({src} -> {dst} [{src_hisa_id} -> {dst_hisa_id}] [distance={dist_mat[src, dst]} time={time_mat[src, dst]} freq={freq[src_node]} service_time={service_time[src_node]}] = {time}")
|
||||
return time
|
||||
|
||||
return time_evaluator
|
||||
|
||||
time_evaluators, time_evaluators_index = {}, {}
|
||||
for route_type in instance.vehicles["route_type"].unique():
|
||||
time_evaluators[route_type] = create_time_evaluator(route_type, instance)
|
||||
time_evaluators_index[route_type] = routing.RegisterTransitCallback(
|
||||
time_evaluators[route_type]
|
||||
)
|
||||
|
||||
def create_demand_evaluator(instance):
|
||||
demands = instance.nodes["demand"].values
|
||||
|
||||
def demand_evaluator(from_node):
|
||||
return int(demands[manager.IndexToNode(from_node)])
|
||||
|
||||
return demand_evaluator
|
||||
|
||||
demand_evaluator = create_demand_evaluator(instance)
|
||||
demand_evaluator_index = routing.RegisterUnaryTransitCallback(demand_evaluator)
|
||||
|
||||
routing.AddDimensionWithVehicleTransitAndCapacity(
|
||||
[
|
||||
distance_evaluators_index[route_type]
|
||||
for route_type in instance.vehicles["route_type"]
|
||||
],
|
||||
0,
|
||||
[1000000] * len(instance.vehicles),
|
||||
# [int(x) for x in instance.vehicles["range"]] if not config.set_initial else [1000000] * len(instance.vehicles),
|
||||
True,
|
||||
"Distance",
|
||||
)
|
||||
"""
|
||||
With an initial solution we must make sure that it lies in the feasible space.
|
||||
If it is not in the feasible space the solver can fail because it does not find an initial solution.
|
||||
That's why we relax the hard vehicle time constraint and add a soft penalty instead.
|
||||
On initial routes the per-vehicle max_time constraint may already be exceeded.
|
||||
"""
|
||||
routing.AddDimensionWithVehicleTransitAndCapacity(
|
||||
[
|
||||
time_evaluators_index[route_type]
|
||||
for route_type in instance.vehicles["route_type"]
|
||||
],
|
||||
0,
|
||||
[int(x) for x in instance.vehicles["max_time"]] if not config.set_initial else [1000 * 3600] * len(instance.vehicles),
|
||||
True,
|
||||
"Time",
|
||||
)
|
||||
|
||||
|
||||
|
||||
routing.AddConstantDimension(1, len(instance.nodes), True, "Count")
|
||||
|
||||
count_dimension = routing.GetDimensionOrDie("Count")
|
||||
for vehicle_id in range(len(instance.vehicles)):
|
||||
if instance.vehicles.iloc[vehicle_id]['cost'] == 0:
|
||||
index_end = routing.End(vehicle_id)
|
||||
count_dimension.SetCumulVarSoftLowerBound(index_end, 3, 1_000_000_000)
|
||||
routing.SetVehicleUsedWhenEmpty(True, vehicle_id)
|
||||
|
||||
if config.set_initial:
|
||||
time_dimension = routing.GetDimensionOrDie('Time')
|
||||
for vehicle_id in range(len(instance.vehicles)):
|
||||
index = routing.End(vehicle_id)
|
||||
max_time = int(instance.vehicles.iloc[vehicle_id]['max_time'])
|
||||
time_dimension.SetCumulVarSoftUpperBound(index, max_time, 1_000)
|
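# SetCumulVarSoftUpperBound(index, max_time, 1_000) keeps the model feasible when a route
# runs over max_time, but adds 1_000 to the objective for every second of cumulated time
# above the bound.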
||||
|
||||
routing.AddDimensionWithVehicleCapacity(
|
||||
demand_evaluator_index,
|
||||
0,
|
||||
[1000000] * len(instance.vehicles),
|
||||
# [int(x) for x in instance.vehicles["capacity"]],
|
||||
True,
|
||||
"Capacity",
|
||||
)
|
||||
|
||||
# District matching
|
||||
if config.set_initial:
|
||||
log.info("District matching ..")
|
||||
node_to_vehicle = {}
|
||||
district_size = {}
|
||||
for v, route in enumerate(instance.initial_routes):
|
||||
for n in route:
|
||||
node_to_vehicle[n] = v
|
||||
district_size[v] = len(route)
|
||||
|
||||
def district_added_callback(vehicle_id, from_index):
|
||||
from_node = manager.IndexToNode(from_index)
|
||||
|
||||
if from_node == 0:  # If node == 0, then it is the depot.
|
||||
return 1
|
||||
# Count 1 if the node does not belong to this vehicle's initial district
|
||||
return 1 if vehicle_id != node_to_vehicle[from_node] else 0
|
||||
|
||||
def district_required_callback(vehicle_id, from_index):
|
||||
from_node = manager.IndexToNode(from_index)
|
||||
|
||||
if from_node == 0:  # If node == 0, then it is the depot.
|
||||
return 1
|
||||
# Count 1 if the node belongs to this vehicle's initial district
|
||||
return 1 if vehicle_id == node_to_vehicle[from_node] else 0
|
||||
|
||||
routing.AddDimensionWithVehicleTransitAndCapacity(
|
||||
[routing.RegisterUnaryTransitCallback(partial(district_added_callback, vehicle_id))
|
||||
for vehicle_id in range(len(instance.vehicles))
|
||||
],
|
||||
0,
|
||||
[len(instance.nodes)] * len(instance.vehicles),
|
||||
True,
|
||||
"District_added",
|
||||
)
|
||||
|
||||
routing.AddDimensionWithVehicleTransitAndCapacity(
|
||||
[routing.RegisterUnaryTransitCallback(partial(district_required_callback, vehicle_id))
|
||||
for vehicle_id in range(len(instance.vehicles))
|
||||
],
|
||||
0,
|
||||
[len(instance.nodes)] * len(instance.vehicles),
|
||||
True,
|
||||
"District_required",
|
||||
)
|
||||
|
||||
district_added_dimension = routing.GetDimensionOrDie('District_added')
|
||||
district_required_dimension = routing.GetDimensionOrDie('District_required')
|
||||
|
||||
# Add soft district bounds (added/required visits) for each vehicle
|
||||
for vehicle_id in range(len(instance.vehicles)):
|
||||
if vehicle_id not in district_size:
|
||||
continue
|
||||
# len(IR) * (1 - 0.8 (GASPER))
|
||||
added_visits = int(district_size[vehicle_id] * (1 - instance.district_percentage)) # 80 % of district size
|
||||
index = routing.End(vehicle_id)
|
||||
district_added_dimension.SetCumulVarSoftUpperBound(index, added_visits, 10_000)
|
||||
district_required_dimension.SetCumulVarSoftLowerBound(index, 3, 10_000)  # District must contain at least 3 initial points
|
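# Worked example with hypothetical numbers: district_size=50 and district_percentage=0.8
# give added_visits = int(50 * (1 - 0.8)) = 10; every out-of-district visit beyond that
# bound adds 10_000 to the objective, and ending a route with fewer than 3 of its
# initial points also costs 10_000 per missing point.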
||||
|
||||
# One vehicle per street (or district)
|
||||
|
||||
# if config.district_mode == 'single' and config.district_penalty > 0:
|
||||
# for _, ids in instance.nodes.groupby('district')['id']:
|
||||
# ids = [manager.NodeToIndex(x) for x in ids.values]
|
||||
# assert 0 not in ids, "Depot can't have an assigned district."
|
||||
# routing.AddSoftSameVehicleConstraint(ids, config.district_penalty)
|
||||
# elif config.district_mode == 'subsets' and config.district_penalty > 0:
|
||||
# for _, ids in instance.nodes.groupby('district')['id']:
|
||||
# ids = [manager.NodeToIndex(x) for x in ids.values]
|
||||
# assert 0 not in ids, "Depot can't have an assigned district."
|
||||
# log.info("Building pairwise constraints ...", end="")
|
||||
## sys.stdout.flush()
|
||||
# combs = list(itertools.combinations(ids, 2))[:40]
|
||||
# combs.append(ids)
|
||||
# for subset in combs:
|
||||
# routing.AddSoftSameVehicleConstraint(subset, config.district_penalty)
|
||||
# log.info("finished")
|
||||
# elif config.district_mode == 'hard':
|
||||
# solver = routing.solver()
|
||||
# for _, ids in instance.nodes.groupby('district')['id']:
|
||||
# ids = [manager.NodeToIndex(x) for x in ids.values]
|
||||
#
|
||||
# v0 = routing.VehicleVar(ids[0])
|
||||
# for i in ids[1:]:
|
||||
# solver.Add(v0 == routing.VehicleVar(i))
|
||||
def create_objective_evaluator(route_type, instance):
|
||||
dist_mat = instance.dist[route_type]
|
||||
time_mat = instance.time[route_type]
|
||||
|
||||
base_point = instance.nodes["base_point"].values
|
||||
service_time = instance.nodes["service_time"].values
|
||||
freq = instance.nodes['freq'].values
|
||||
hisa_ids = instance.nodes['hisa_id'].values
|
||||
|
||||
def objective_evaluator(from_node, to_node):
|
||||
src_node = manager.IndexToNode(from_node)
|
||||
dst_node = manager.IndexToNode(to_node)
|
||||
src = base_point[manager.IndexToNode(from_node)]
|
||||
dst = base_point[manager.IndexToNode(to_node)]
|
||||
src_hisa_id = hisa_ids[src]
|
||||
dst_hisa_id = hisa_ids[dst]
|
||||
|
||||
# THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!!
|
||||
if dist_mat[src, dst] > 3000:
|
||||
penalty = dist_mat[src, dst]
|
||||
else:
|
||||
distance = dist_mat[src, dst]
|
||||
max_distance_sqrt = math.sqrt(3000)
|
||||
penalty = (distance / max_distance_sqrt) ** 2
|
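# For distance <= 3000 m the penalty equals distance**2 / 3000, which matches the raw
# distance exactly at the 3000 m threshold, so the two branches join continuously.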
||||
if config.useDistrictCentrality:
|
||||
total_cost = round(time_mat[src, dst] + freq[src_node] * service_time[src_node] + penalty)
|
||||
else:
|
||||
total_cost = round(time_mat[src, dst] + freq[src_node] * service_time[src_node])
|
||||
# log.info(f"({src} -> {dst} [{src_hisa_id} -> {dst_hisa_id}] [distance={dist_mat[src, dst]} time={time_mat[src, dst]} freq={freq[src_node]} service_time={service_time[src_node]}] = {time}")
|
||||
return total_cost
|
||||
|
||||
return objective_evaluator
|
||||
|
||||
objective_evaluators, objective_evaluators_index = {}, {}
|
||||
for route_type in instance.vehicles["route_type"].unique():
|
||||
objective_evaluators[route_type] = create_objective_evaluator(route_type, instance)
|
||||
objective_evaluators_index[route_type] = routing.RegisterTransitCallback(
|
||||
objective_evaluators[route_type]
|
||||
)
|
||||
# Objective
|
||||
if config.objective == 'distance':
|
||||
obj_evaluators_index = distance_evaluators_index
|
||||
obj_dimension = routing.GetDimensionOrDie('Distance')
|
||||
elif config.objective == 'time':
|
||||
obj_evaluators_index = time_evaluators_index
|
||||
obj_dimension = routing.GetDimensionOrDie('Time')
|
||||
obj_evaluators_index = objective_evaluators_index
|
||||
# sum of distances (or travel times)
|
||||
for i, route_type in enumerate(instance.vehicles["route_type"]):
|
||||
routing.SetArcCostEvaluatorOfVehicle(obj_evaluators_index[route_type], i)
|
||||
|
||||
# diff between max and min distance (or travel time)
|
||||
# obj_dimension.SetGlobalSpanCostCoefficient(100)
|
||||
|
||||
# cost per each used vehicle
|
||||
for i, cost in enumerate(instance.vehicles["cost"]):
|
||||
routing.SetFixedCostOfVehicle(int(cost), i)
|
||||
|
||||
solution_callback = SolutionCallback(manager, routing, instance, distance_evaluators, time_evaluators, solution_callback_fn, stop_callback_fn)
|
||||
routing.AddAtSolutionCallback(solution_callback)
|
||||
|
||||
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
|
||||
search_parameters.first_solution_strategy = (
|
||||
routing_enums_pb2.FirstSolutionStrategy.LOCAL_CHEAPEST_COST_INSERTION
|
||||
)
|
||||
search_parameters.local_search_metaheuristic = (
|
||||
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH
|
||||
)
|
||||
search_parameters.time_limit.FromSeconds(time_limit_sec)
|
||||
search_parameters.log_search = log_search
|
||||
|
||||
if config.set_initial:
|
||||
log.info("Initial solution added.")
|
||||
routing.CloseModelWithParameters(search_parameters)
|
||||
initial_solution = routing.ReadAssignmentFromRoutes(instance.initial_routes, True)
|
||||
assert initial_solution is not None, "Initial solution is not feasible."
|
||||
log.info("Initial solution found!")
|
||||
|
||||
solution = routing.SolveFromAssignmentWithParameters(
|
||||
initial_solution, search_parameters
|
||||
)
|
||||
else:
|
||||
solution = routing.SolveWithParameters(search_parameters)
|
||||
|
||||
# Stop the callback timer since we don't need it anymore
|
||||
solution_callback._timer.cancel()
|
||||
|
||||
assert solution, "No solution found."
|
||||
if log_search:
|
||||
debug_solution(instance.vehicles, instance.nodes, manager, routing, solution, log)
|
||||
obj, sol = solution.ObjectiveValue(), solution_callback.best_routes
|
||||
if config.set_initial:
|
||||
debug_solution_overlapping(instance.initial_routes, sol, log)
|
||||
return obj, sol
|
||||
|
||||
|
||||
def debug_solution(vehicles, points, manager, routing, solution, log: Logger):
|
||||
objectiveValue: float = solution.ObjectiveValue()
|
||||
distanceDimension = routing.GetMutableDimension("Distance")
|
||||
timeDimension = routing.GetMutableDimension("Time")
|
||||
|
||||
log.info(f"Objective value: {objectiveValue}")
|
||||
|
||||
total_time = 0
|
||||
total_distance = 0
|
||||
total_cost = 0
|
||||
for vehicle_idx in range(len(vehicles)):
|
||||
# add first node
|
||||
index = routing.Start(vehicle_idx)
|
||||
node = manager.IndexToNode(index)
|
||||
point = points.iloc[node].to_dict()
|
||||
|
||||
log.info(f"Route for vehicle {vehicle_idx} = {vehicles.iloc[vehicle_idx].to_dict()}:")
|
||||
route_time = 0
|
||||
route_distance = 0
|
||||
route_cost = 0
|
||||
start = True
|
||||
|
||||
while not routing.IsEnd(index):
|
||||
# log.info(f"\t{node} = {point}")
|
||||
|
||||
# Previous info
|
||||
ctime = solution.Value(timeDimension.CumulVar(index))
|
||||
cdistance = solution.Value(distanceDimension.CumulVar(index))
|
||||
|
||||
# Next index
|
||||
previous_index = index
|
||||
index = solution.Value(routing.NextVar(index))
|
||||
|
||||
# Next info
|
||||
ntime = solution.Value(timeDimension.CumulVar(index))
|
||||
ndistance = solution.Value(distanceDimension.CumulVar(index))
|
||||
|
||||
time = ntime - ctime
|
||||
distance = ndistance - cdistance
|
||||
cost = routing.GetArcCostForVehicle(previous_index, index, vehicle_idx)
|
||||
if start:
|
||||
log.info(f"STARTING COST: {cost}")
|
||||
start = False
|
||||
|
||||
# log.info(f"\tCurrent time: {round(time / 3600, 3)}h")
|
||||
# log.info(f"\tCurrent distance: {round(distance, 3)}m")
|
||||
# log.info(f"\tCurrent cost: {round(cost / 3600, 3)}\n")
|
||||
|
||||
route_time += time
|
||||
route_distance += distance
|
||||
route_cost += cost
|
||||
|
||||
node = manager.IndexToNode(index)
|
||||
point = points.iloc[node].to_dict()
|
||||
|
||||
# log.info(f"\t{node} = {point}")
|
||||
log.info(f"Route time: {round(route_time / 3600, 3)}h")
|
||||
log.info(f"Route distance: {round(route_distance, 3)}m")
|
||||
log.info(f"Route cost: {round(route_cost, 3)}\n")
|
||||
|
||||
total_time += route_time
|
||||
total_distance += route_distance
|
||||
total_cost += route_cost
|
||||
|
||||
log.info(f"\nAll routes time: {round(total_time / 3600, 3)}h")
|
||||
log.info(f"All routes distance: {round(total_distance, 3)}m")
|
||||
log.info(f"All routes cost: {round(total_cost, 3)}")
|
||||
|
||||
def debug_solution_overlapping(initial_routes: list[list[int]], solution: DataFrame, log: Logger):
|
||||
for id, vehicle, type, route, total_distance, total_time, total_cost, num_points in solution.to_records():
|
||||
if len(initial_routes) == id:
|
||||
break
|
||||
initial_route = set(initial_routes[id])
|
||||
route = set(route)
|
||||
crossSection = initial_route.intersection(route)
|
||||
if len(initial_route) > 0:
|
||||
log.info(f"Vehicle {id}. overlappings: {round(100 * len(crosSection) / len(initial_route), 1)}%")
|
71
admiral-worker/app/repos/sql/OptimizationMetricsSqlRepo.py
Normal file
@ -0,0 +1,71 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from sqlalchemy import Engine, BLOB
|
||||
from sqlalchemy import PrimaryKeyConstraint
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self, Optional
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core import Utils
|
||||
from core.domain.optimization.Optimization import Optimization
|
||||
from core.domain.optimization.OptimizationMetrics import OptimizationMetrics
|
||||
from core.repos.OptimizationMetricsRepo import OptimizationMetricsRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class OptimizationMetricsSqlRepo(OptimizationMetricsRepo):
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "optimization_metrics"
|
||||
|
||||
__table_args__ = (PrimaryKeyConstraint("optimization_id", "solution", "created_at"),)
|
||||
|
||||
optimization_id: str = Field(foreign_key="optimization.id")
|
||||
solution: int
|
||||
vehicles: int
|
||||
cost: float
|
||||
distance: float
|
||||
duration: float
|
||||
created_at: int
|
||||
overlapping: str = Field(sa_type=BLOB)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: OptimizationMetrics) -> Self:
|
||||
return cls(
|
||||
optimization_id=obj.optimizationId.value,
|
||||
solution=obj.solution,
|
||||
cost=obj.cost,
|
||||
vehicles=obj.vehicles,
|
||||
distance=obj.distance,
|
||||
duration=obj.duration,
|
||||
created_at=obj.createdAt.timestamp(),
|
||||
overlapping=Utils.json_dumps(obj.overlapping).encode('ascii') if obj.overlapping is not None else None,
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[OptimizationMetrics]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[OptimizationMetrics]) -> Optional[OptimizationMetrics]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationMetrics]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(optimization_id=optimizationId.value)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, optimizationMetrics: OptimizationMetrics):
|
||||
with Session(self.db) as conn:
|
||||
conn.add(self.Table.toRow(optimizationMetrics))
|
||||
conn.commit()
|
117
admiral-worker/app/repos/sql/OptimizationResultSqlRepo.py
Normal file
@ -0,0 +1,117 @@
|
||||
import json
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
|
||||
from sqlalchemy import Engine, BLOB, text
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self, Optional
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core import Utils
|
||||
from core.domain.optimization.Optimization import Optimization
|
||||
from core.domain.optimization.OptimizationPoint import OptimizationPoint
|
||||
from core.domain.optimization.OptimizationResult import OptimizationResult
|
||||
from core.domain.optimization.OptimizationRoute import OptimizationRoute
|
||||
from core.repos.OptimizationResultRepo import OptimizationResultRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class OptimizationResultSqlRepo(OptimizationResultRepo):
|
||||
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "optimization_result"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
optimization_id: str = Field(foreign_key="optimization.id")
|
||||
routes: str = Field(sa_type=BLOB)
|
||||
unvisited: str = Field(sa_type=BLOB)
|
||||
created_at: int
|
||||
info: str
|
||||
authorized_by_user_id: str
|
||||
parent: str
|
||||
|
||||
def toDomain(self) -> OptimizationResult:
|
||||
routes = [OptimizationRoute.fromJson(**x) for x in json.loads(self.routes.decode('utf-8'))]
|
||||
unvisited = [OptimizationPoint.fromJson(**x) for x in json.loads(self.unvisited.decode('utf-8'))] if self.unvisited is not None else []
|
||||
return OptimizationResult(
|
||||
optimizationId=Id(value=uuid.UUID(self.optimization_id)),
|
||||
routes=routes,
|
||||
unvisited=unvisited,
|
||||
createdAt=self.created_at,
|
||||
info=self.info,
|
||||
authorizedByUserId=self.authorized_by_user_id,
|
||||
parent=Id(value=uuid.UUID(self.parent)) if self.parent is not None else None,
|
||||
id=Id(value=uuid.UUID(self.id)),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: OptimizationResult) -> Self:
|
||||
return cls(
|
||||
optimization_id=obj.optimizationId.value,
|
||||
routes=Utils.json_dumps(obj.routes).encode('ascii'),
|
||||
unvisited=Utils.json_dumps(obj.unvisited).encode('ascii') if obj.unvisited is not None else None,
|
||||
created_at=obj.createdAt,
|
||||
info=obj.info,
|
||||
authorized_by_user_id=obj.authorizedByUserId,
|
||||
parent=obj.parent.value if obj.parent is not None else None,
|
||||
id=obj.id.value,
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[OptimizationResult]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[OptimizationResult]) -> Optional[OptimizationResult]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationResult]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(optimization_id=optimizationId.value)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, optimizationResult: OptimizationResult) -> OptimizationResult:
|
||||
with Session(self.db) as conn:
|
||||
conn.merge(self.Table.toRow(optimizationResult))
|
||||
conn.commit()
|
||||
return optimizationResult
|
||||
|
||||
@override
|
||||
def getLatestByOptimizationId(self, optimizationId: Id[Optimization]) -> Optional[OptimizationResult]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(
|
||||
self.Table
|
||||
).order_by(
|
||||
self.Table.created_at.desc()
|
||||
).limit(
|
||||
1
|
||||
).filter_by(optimization_id=optimizationId.value)
|
||||
|
||||
row = conn.exec(query).one_or_none()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return row.toDomain()
|
||||
|
||||
@override
|
||||
def getAllIds(self) -> list[Id[OptimizationResult]]:
|
||||
query = text(f"""
|
||||
select optimization_result.id from optimization_result
|
||||
join optimization o on o.id = optimization_result.optimization_id
|
||||
where state not in ('TEST', 'DELETED')
|
||||
""")
|
||||
|
||||
with (Session(self.db) as conn):
|
||||
results = conn.exec(query).all()
|
||||
return [Id(value=row[0]) for row in results]
|
139
admiral-worker/app/repos/sql/OptimizationSqlRepo.py
Normal file
@ -0,0 +1,139 @@
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Engine, update
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core.domain.optimization.Optimization import Optimization
|
||||
from core.domain.optimization.OptimizationState import OptimizationState
|
||||
from core.domain.optimization.OptimizationType import OptimizationType
|
||||
from core.repos.OptimizationRepo import OptimizationRepo
|
||||
from core.types.Id import Id
|
||||
from core.types.IntId import IntId
|
||||
|
||||
|
||||
@dataclass
|
||||
class OptimizationSqlRepo(OptimizationRepo):
|
||||
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "optimization"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
posta: int
|
||||
title: str
|
||||
description: str
|
||||
optimization_time: float
|
||||
weight: int
|
||||
dates: str
|
||||
created_at: int
|
||||
authorized_by_user_id: str
|
||||
state_changed_at: int
|
||||
use_frequency: bool
|
||||
use_unvisited_crn: bool
|
||||
district_centering: bool
|
||||
static_service_times: int
|
||||
state: str
|
||||
type: str
|
||||
parent: str
|
||||
|
||||
def toDomain(self) -> Optimization:
|
||||
return Optimization(
|
||||
posta=IntId(value=self.posta),
|
||||
title=self.title,
|
||||
description=self.description,
|
||||
weight=self.weight,
|
||||
dates=[datetime.fromtimestamp(int(date)).date() for date in self.dates.split(",") if date.isnumeric()],
|
||||
optimizationTime=timedelta(seconds=self.optimization_time),
|
||||
createdAt=datetime.fromtimestamp(self.created_at),
|
||||
authorizedByUserId=self.authorized_by_user_id,
|
||||
state=OptimizationState(self.state),
|
||||
type=OptimizationType(self.type),
|
||||
useFrequency=self.use_frequency,
|
||||
useUnvisitedCrn=self.use_unvisited_crn,
|
||||
useDistrictCentrality=self.district_centering,
|
||||
stateChangedAt=datetime.fromtimestamp(self.state_changed_at),
|
||||
staticServiceTimes=self.static_service_times,
|
||||
parent=Id(value=uuid.UUID(self.parent)) if self.parent is not None else None,
|
||||
id=Id(value=uuid.UUID(self.id)),
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[Optimization]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[Optimization]) -> Optional[Optimization]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
def getWithState(self, state: OptimizationState) -> list[Optimization]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(state=state.value)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def updateFirst(self, fromState: OptimizationState, toState: OptimizationState) -> Optional[Optimization]:
|
||||
with Session(self.db) as conn:
|
||||
|
||||
# Get candidate for update
|
||||
selectQuery = select(self.Table).filter_by(state=fromState.value).limit(1)
|
||||
row = conn.exec(selectQuery).one_or_none()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
# Update the candidate, but only if its state is still unchanged
|
||||
updateQuery = update(self.Table).filter_by(state=fromState.value, id=row.id).values(state=toState.value,
|
||||
state_changed_at=datetime.now().timestamp())
|
||||
|
||||
# If the candidate was updated by another worker in the meantime, abort without committing
|
||||
if conn.exec(updateQuery).rowcount != 1:
|
||||
return None
|
||||
|
||||
# Fetch the updated candidate again
|
||||
selectQuery = select(self.Table).filter_by(id=row.id).limit(1)
|
||||
row = conn.exec(selectQuery).one_or_none()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
# Commit changes
|
||||
conn.commit()
|
||||
return row.toDomain()
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def setState(self, id: Id[Optimization], toState: OptimizationState) -> Optional[Optimization]:
|
||||
with Session(self.db) as conn:
|
||||
updateQuery = update(self.Table).filter_by(id=id.value).values(state=toState.value, state_changed_at=datetime.now().timestamp())
|
||||
conn.exec(updateQuery)
|
||||
conn.commit()
|
||||
|
||||
@override
|
||||
def getLatestConfirmedByPosta(self, posta: int) -> Optional[Optimization]:
|
||||
with (Session(self.db) as conn):
|
||||
query = select(
|
||||
self.Table
|
||||
).order_by(
|
||||
self.Table.state_changed_at.desc()
|
||||
).limit(
|
||||
1
|
||||
).filter_by(
|
||||
posta=posta, state=OptimizationState.CONFIRMED.value
|
||||
)
|
||||
|
||||
row = conn.exec(query).one_or_none()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return row.toDomain()
|
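A hypothetical sketch of how a worker could use `updateFirst` to claim work atomically, assuming `optimizationRepo` is an `OptimizationSqlRepo` instance (the state names are placeholders, not confirmed `OptimizationState` members):

```python
# Illustrative only: poll for one optimization in a given state and flip it to the next
# state in a single optimistic update; None means another worker claimed it first.
claimed = optimizationRepo.updateFirst(
    fromState=OptimizationState.CREATED,   # placeholder state name
    toState=OptimizationState.RUNNING,     # placeholder state name
)
if claimed is not None:
    ...  # run the optimization job for `claimed`
```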
70
admiral-worker/app/repos/sql/OptimizationVehicleSqlRepo.py
Normal file
@ -0,0 +1,70 @@
|
||||
import json
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Engine
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override
|
||||
|
||||
from core.domain.optimization.Optimization import Optimization
|
||||
from core.domain.optimization.OptimizationVehicle import OptimizationVehicle
|
||||
from core.domain.optimization.TransportMode import TransportMode
|
||||
from core.repos.OptimizationVehicleRepo import OptimizationVehicleRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class OptimizationVehicleSqlRepo(OptimizationVehicleRepo):
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "optimization_vehicle"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
optimization_id: str = Field(foreign_key="optimization.id")
|
||||
name: str
|
||||
type: str
|
||||
capacity: int
|
||||
range: float # kilometers
|
||||
min_quantity: int
|
||||
max_quantity: int
|
||||
delivery_time: float # hours
|
||||
average_speed: float
|
||||
max_speed: float
|
||||
districts: str
|
||||
|
||||
def toDomain(self) -> OptimizationVehicle:
|
||||
return OptimizationVehicle(
|
||||
optimizationId=Id(value=uuid.UUID(self.optimization_id)),
|
||||
name=self.name,
|
||||
type=TransportMode(self.type),
|
||||
capacity=self.capacity,
|
||||
range=self.range,
|
||||
minQuantity=self.min_quantity,
|
||||
maxQuantity=self.max_quantity,
|
||||
deliveryTime=self.delivery_time,
|
||||
averageSpeed=self.average_speed,
|
||||
maxSpeed=self.max_speed,
|
||||
districts=self.districts,
|
||||
id=Id(value=uuid.UUID(self.id))
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[OptimizationVehicle]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[OptimizationVehicle]) -> Optional[OptimizationVehicle]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationVehicle]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(optimization_id=optimizationId.value)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
73
admiral-worker/app/repos/sql/WorkerJobLogSqlRepo.py
Normal file
@ -0,0 +1,73 @@
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Engine
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core.domain.worker.WorkerJob import WorkerJob
|
||||
from core.domain.worker.WorkerLog import WorkerLog
|
||||
from core.domain.worker.WorkerLogLevel import WorkerLogLevel
|
||||
from core.repos.WorkerJobLogRepo import WorkerJobLogRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class WorkerJobLogSqlRepo(WorkerJobLogRepo):
|
||||
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "worker_job_log"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
context: str
|
||||
data: str
|
||||
worker_job_id: str = Field(foreign_key="worker_job.id")
|
||||
created_at: float
|
||||
level: str
|
||||
|
||||
def toDomain(self) -> WorkerLog:
|
||||
return WorkerLog(
|
||||
context=self.context,
|
||||
data=self.data,
|
||||
ownerId=Id(value=uuid.UUID(self.worker_job_id)),
|
||||
createdAt=self.created_at,
|
||||
level=WorkerLogLevel(self.level),
|
||||
id=Id(value=uuid.UUID(self.id))
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: WorkerLog) -> Self:
|
||||
return cls(
|
||||
context=obj.context,
|
||||
data=obj.data,
|
||||
worker_job_id=obj.ownerId.value,
|
||||
created_at=obj.createdAt,
|
||||
level=obj.level.value,
|
||||
id=obj.id.value,
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[WorkerLog]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, context: str, workerJobId: Id[WorkerJob], data: str, level: WorkerLogLevel) -> WorkerLog:
|
||||
obj = WorkerLog(context=context, data=data, ownerId=workerJobId, level=level)
|
||||
with Session(self.db) as conn:
|
||||
conn.add(self.Table.toRow(obj))
|
||||
conn.commit()
|
||||
return obj
|
67
admiral-worker/app/repos/sql/WorkerJobSqlRepo.py
Normal file
@ -0,0 +1,67 @@
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self
|
||||
from sqlalchemy.engine import Engine
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core.domain.optimization.OptimizationState import OptimizationState
|
||||
from core.domain.worker.WorkerJob import WorkerJob
|
||||
from core.repos.WorkerJobRepo import WorkerJobRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class WorkerJobSqlRepo(WorkerJobRepo):
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "worker_job"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
optimization_id: str = Field(foreign_key="optimization.id")
|
||||
worker_id: str = Field(foreign_key="worker.id")
|
||||
name: str
|
||||
state: str
|
||||
|
||||
def toDomain(self) -> WorkerJob:
|
||||
return WorkerJob(
|
||||
id=Id(value=uuid.UUID(self.id)),
|
||||
optimizationId=Id(value=uuid.UUID(self.optimization_id)),
|
||||
workerId=Id(value=uuid.UUID(self.worker_id)),
|
||||
name=self.name,
|
||||
state=OptimizationState(self.state)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: WorkerJob) -> Self:
|
||||
return cls(
|
||||
id=obj.id.value,
|
||||
optimization_id=obj.optimizationId.value,
|
||||
worker_id=obj.workerId.value,
|
||||
name=obj.name,
|
||||
state=obj.state.value,
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[WorkerJob]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[WorkerJob]) -> Optional[WorkerJob]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, obj: WorkerJob):
|
||||
with Session(self.db) as conn:
|
||||
conn.add(self.Table.toRow(obj))
|
||||
conn.commit()
|
||||
return obj
|
68
admiral-worker/app/repos/sql/WorkerJobStatusSqlRepo.py
Normal file
@ -0,0 +1,68 @@
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core.domain.worker.WorkerJobStatus import WorkerJobStatus
|
||||
from core.domain.worker.WorkerJob import WorkerJob
|
||||
from core.repos.WorkerJobStatusRepo import WorkerJobStatusRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class WorkerJobStatusSqlRepo(WorkerJobStatusRepo):
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "worker_job_status"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
worker_job_id: str = Field(foreign_key="worker_job.id")
|
||||
ram_taken: float
|
||||
cpu_utilization: float
|
||||
created_at: int
|
||||
|
||||
def toDomain(self) -> WorkerJobStatus:
|
||||
return WorkerJobStatus(
|
||||
ownerId=Id(value=uuid.UUID(self.worker_job_id)),
|
||||
ramTaken=self.ram_taken,
|
||||
cpuUtilization=self.cpu_utilization,
|
||||
createdAt=self.created_at,
|
||||
id=Id(value=uuid.UUID(self.id))
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: WorkerJobStatus) -> Self:
|
||||
return cls(
|
||||
worker_job_id=obj.ownerId.value,
|
||||
ram_taken=obj.ramTaken,
|
||||
cpu_utilization=obj.cpuUtilization,
|
||||
created_at=obj.createdAt,
|
||||
id=obj.id.value
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[WorkerJobStatus]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[WorkerJobStatus]) -> Optional[WorkerJobStatus]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, workerJobId: Id[WorkerJob], ramTaken: float, cpuUtilization: float) -> WorkerJobStatus:
|
||||
obj = WorkerJobStatus(ownerId=workerJobId, ramTaken=ramTaken, cpuUtilization=cpuUtilization)
|
||||
with Session(self.db) as conn:
|
||||
conn.add(self.Table.toRow(obj))
|
||||
conn.commit()
|
||||
return obj
|
72
admiral-worker/app/repos/sql/WorkerLogSqlRepo.py
Normal file
@ -0,0 +1,72 @@
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlmodel import SQLModel, Field, Session, select
|
||||
from typing_extensions import override, Self
|
||||
|
||||
from app.repos.sql import dbRetry
|
||||
from core.domain.worker.Worker import Worker
|
||||
from core.domain.worker.WorkerLog import WorkerLog
|
||||
from core.domain.worker.WorkerLogLevel import WorkerLogLevel
|
||||
from core.repos.WorkerLogRepo import WorkerLogRepo
|
||||
from core.types.Id import Id
|
||||
|
||||
|
||||
@dataclass
|
||||
class WorkerLogSqlRepo(WorkerLogRepo):
|
||||
db: Engine
|
||||
|
||||
class Table(SQLModel, table=True):
|
||||
__tablename__ = "worker_log"
|
||||
|
||||
id: str = Field(primary_key=True)
|
||||
context: str
|
||||
data: str
|
||||
worker_id: str = Field(foreign_key="worker.id")
|
||||
level: str
|
||||
created_at: float
|
||||
|
||||
def toDomain(self) -> WorkerLog:
|
||||
return WorkerLog(
|
||||
context=self.context,
|
||||
data=self.data,
|
||||
ownerId=Id(value=uuid.UUID(self.worker_id)),
|
||||
createdAt=self.created_at,
|
||||
level=WorkerLogLevel(self.level),
|
||||
id=Id(value=uuid.UUID(self.id))
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def toRow(cls, obj: WorkerLog) -> Self:
|
||||
return cls(
|
||||
context=obj.context,
|
||||
data=obj.data,
|
||||
worker_id=obj.ownerId.value,
|
||||
created_at=obj.createdAt,
|
||||
level=obj.level.value,
|
||||
id=obj.id.value,
|
||||
)
|
||||
|
||||
@override
|
||||
def getAll(self) -> list[WorkerLog]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table)
|
||||
return [row.toDomain() for row in conn.exec(query).all()]
|
||||
|
||||
@override
|
||||
def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]:
|
||||
with Session(self.db) as conn:
|
||||
query = select(self.Table).filter_by(id=id.value)
|
||||
row = conn.exec(query).one_or_none()
|
||||
return row.toDomain() if row is not None else None
|
||||
|
||||
@override
|
||||
@dbRetry
|
||||
def post(self, context: str, workerId: Id[Worker], data: str, level: WorkerLogLevel) -> WorkerLog:
|
||||
obj = WorkerLog(context=context, data=data, ownerId=workerId, level=level)
|
||||
with Session(self.db) as conn:
|
||||
conn.add(self.Table.toRow(obj))
|
||||
conn.commit()
|
||||
return obj
|
83
admiral-worker/app/repos/sql/WorkerSqlRepo.py
Normal file
@ -0,0 +1,83 @@
import uuid
from dataclasses import dataclass
from typing import Optional

from sqlalchemy import delete
from sqlalchemy.engine import Engine
from sqlmodel import SQLModel, Field, Session, select
from typing_extensions import override, Self

from app.repos.sql import dbRetry
from core.domain.worker.Worker import Worker
from core.domain.worker.WorkerState import WorkerState
from core.domain.worker.WorkerType import WorkerType
from core.repos.WorkerRepo import WorkerRepo
from core.types.Id import Id


@dataclass
class WorkerSqlRepo(WorkerRepo):
    db: Engine

    class Table(SQLModel, table=True):
        __tablename__ = "worker"

        id: str = Field(primary_key=True)
        type: str
        ip: str
        state: str

        def toDomain(self) -> Worker:
            return Worker(
                ip=self.ip,
                type=WorkerType(self.type),
                state=WorkerState(self.state),
                id=Id(value=uuid.UUID(self.id))
            )

        @classmethod
        def toRow(cls, obj: Worker) -> Self:
            return cls(
                ip=obj.ip,
                type=obj.type,
                id=obj.id.value,
                state=obj.state.value
            )

    @override
    def getAll(self) -> list[Worker]:
        with Session(self.db) as conn:
            query = select(self.Table)
            return [row.toDomain() for row in conn.exec(query).all()]

    @override
    def get(self, id: Id[Worker]) -> Optional[Worker]:
        with Session(self.db) as conn:
            query = select(self.Table).filter_by(id=id.value)
            row = conn.exec(query).one_or_none()
            return row.toDomain() if row is not None else None

    @override
    def post(self, ip: str, type: WorkerType) -> Worker:
        worker = Worker(ip=ip, type=type, state=WorkerState.NORMAL)
        with Session(self.db) as conn:
            conn.add(self.Table.toRow(worker))
            conn.commit()
            return worker

    @override
    def getByIp(self, ip: str, type: WorkerType) -> Optional[Worker]:
        with Session(self.db) as conn:
            query = select(self.Table).filter_by(ip=ip, type=type)
            row = conn.exec(query).one_or_none()
            return row.toDomain() if row is not None else None

    @override
    @dbRetry
    def deleteByIp(self, ip: str, type: WorkerType) -> int:
        with Session(self.db) as conn:
            query = delete(self.Table).filter_by(ip=ip, type=type)
            result = conn.exec(query).rowcount
            conn.commit()

            return result
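Reviewer note: for orientation, a minimal wiring sketch (not part of this commit) showing how a repo such as WorkerSqlRepo could be attached to a SQLModel engine. The connection URL is a placeholder assumption; the WorkerType member is taken generically instead of guessing a name.

```python
# Hedged sketch: wiring WorkerSqlRepo to an engine (URL and enum member are assumptions).
from sqlmodel import SQLModel, create_engine

from app.repos.sql.WorkerSqlRepo import WorkerSqlRepo
from core.domain.worker.WorkerType import WorkerType

engine = create_engine("sqlite:///admiral.db")  # placeholder URL; any SQLAlchemy URL should do
SQLModel.metadata.create_all(engine)            # creates the "worker" table declared above

repo = WorkerSqlRepo(db=engine)
someType = list(WorkerType)[0]                  # avoid guessing a concrete member name
worker = repo.post(ip="127.0.0.1", type=someType)
print(repo.getByIp(ip="127.0.0.1", type=someType))
```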
69
admiral-worker/app/repos/sql/WorkerStatusSqlRepo.py
Normal file
@ -0,0 +1,69 @@
import uuid
from dataclasses import dataclass
from typing import Optional
from typing_extensions import Self

from sqlalchemy.engine import Engine
from sqlmodel import SQLModel, Field, Session, select
from typing_extensions import override

from app.repos.sql import dbRetry
from core.domain.worker.Worker import Worker
from core.domain.worker.WorkerStatus import WorkerStatus
from core.repos.WorkerStatusRepo import WorkerStatusRepo
from core.types.Id import Id


@dataclass
class WorkerStatusSqlRepo(WorkerStatusRepo):
    db: Engine

    class Table(SQLModel, table=True):
        __tablename__ = "worker_status"

        id: str = Field(primary_key=True)
        worker_id: str = Field(foreign_key="worker.id")
        ram_available: float
        cpu_utilization: float
        created_at: int

        def toDomain(self) -> WorkerStatus:
            return WorkerStatus(
                ownerId=Id(value=uuid.UUID(self.worker_id)),
                ramAvailable=self.ram_available,
                cpuUtilization=self.cpu_utilization,
                createdAt=self.created_at,
                id=Id(value=uuid.UUID(self.id))
            )

        @classmethod
        def toRow(cls, obj: WorkerStatus) -> Self:
            return cls(
                worker_id=obj.ownerId.value,
                ram_available=obj.ramAvailable,
                cpu_utilization=obj.cpuUtilization,
                created_at=obj.createdAt,
                id=obj.id.value
            )

    @override
    def getAll(self) -> list[WorkerStatus]:
        with Session(self.db) as conn:
            query = select(self.Table)
            return [row.toDomain() for row in conn.exec(query).all()]

    @override
    def get(self, id: Id[WorkerStatus]) -> Optional[WorkerStatus]:
        with Session(self.db) as conn:
            query = select(self.Table).filter_by(id=id.value)
            row = conn.exec(query).one_or_none()
            return row.toDomain() if row is not None else None

    @dbRetry
    @override
    def post(self, workerId: Id[Worker], ramAvailable: float, cpuUtilization: float) -> WorkerStatus:
        workerStatus = WorkerStatus(ownerId=workerId, ramAvailable=ramAvailable, cpuUtilization=cpuUtilization)
        with Session(self.db) as conn:
            conn.add(self.Table.toRow(workerStatus))
            conn.commit()
            return workerStatus
21
admiral-worker/app/repos/sql/__init__.py
Normal file
@ -0,0 +1,21 @@
import logging
from collections.abc import Callable
from functools import wraps
from time import sleep
from sqlalchemy.exc import DBAPIError

log = logging.getLogger(__name__)


def dbRetry(func: Callable, max_retries=60, delay=5, exceptions=(DBAPIError,)):
    @wraps(func)
    def wrapper(*args, **kwargs):
        for i in range(max_retries):
            try:
                return func(*args, **kwargs)
            except exceptions as e:
                log.warning(f"DB function {func.__name__} attempt {i + 1} failed with: {e}")
                sleep(delay)
        raise Exception("DB operation retry limit reached!")

    return wrapper
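Reviewer note: dbRetry re-invokes the wrapped callable on DBAPIError up to max_retries times with a fixed delay, then raises. A hedged sketch (not part of this commit) of that behaviour on a flaky function; the simulated error construction is an assumption for illustration only.

```python
# Hedged sketch of dbRetry applied to a flaky callable.
from sqlalchemy.exc import DBAPIError

from app.repos.sql import dbRetry

attempts = {"n": 0}

@dbRetry
def flaky_insert():
    attempts["n"] += 1
    if attempts["n"] < 3:
        # Simulate a transient driver failure; dbRetry logs, sleeps 5 s, and retries.
        raise DBAPIError("INSERT ...", params=None, orig=RuntimeError("connection dropped"))
    return "ok"

print(flaky_insert())  # succeeds on the third attempt
```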
52
admiral-worker/app/services/FsFtpService.py
Normal file
@ -0,0 +1,52 @@
import logging
import os
from dataclasses import dataclass
from pathlib import Path
import shutil
from typing_extensions import override

from core.extend import fs
from core.services.FtpService import FtpService

log = logging.getLogger(__name__)


@dataclass
class FsFtpService(FtpService):

    @override
    def download(self, path: Path):
        ftpPath = fs.getFtpPath(path.name)
        log.info(f"Download: '{path.name}' to '{path}'")
        shutil.copyfile(src=ftpPath, dst=path)

    @override
    def upload(self, path: Path):
        ftpPath = fs.getFtpPath(path.name)
        log.info(f"Upload: '{path}' to '{path.name}'")
        shutil.copyfile(src=path, dst=ftpPath)

    @override
    def rename(self, oldPath: Path, newPath: Path):
        newFtpPath = fs.getFtpPath(newPath.name)
        oldFtpPath = fs.getFtpPath(oldPath.name)
        log.info(f"Rename: '{oldPath.name}' to '{newPath.name}'")
        shutil.move(src=oldFtpPath, dst=newFtpPath)

    @override
    def delete(self, path: Path):
        ftpPath = fs.getFtpPath(path.name)
        log.info(f"Delete: '{path.name}'")
        ftpPath.unlink(missing_ok=True)

    @override
    def copy(self, path: Path, newPath: Path):
        newFtpPath = fs.getFtpPath(newPath.name)
        oldFtpPath = fs.getFtpPath(path.name)
        log.info(f"Copy: '{path.name}' to '{newPath.name}'")
        shutil.copyfile(src=oldFtpPath, dst=newFtpPath)

    @override
    def scan(self) -> list[Path]:
        ftpPath = fs.getFtpPath()
        return list(ftpPath.iterdir())
77
admiral-worker/app/services/FtputilFtpService.py
Normal file
@ -0,0 +1,77 @@
import logging
import time
from dataclasses import dataclass
from pathlib import Path

from ftputil import FTPHost
from typing_extensions import override

from core.services.FtpService import FtpService

log = logging.getLogger(__name__)


@dataclass
class FtputilFtpService(FtpService):

    domain: str
    username: str
    password: str
    port: str

    class Progress:
        def __init__(self, size: int):
            self.allBytes: int = size
            self.transferedBytes: int = 0
            self.startTime = time.time()
            self.i = 0

        def __call__(self, chunk: bytes):
            self.i += 1

            chunkBytes = len(chunk)
            self.transferedBytes += chunkBytes

            if self.i % 10 == 0:
                duration = time.time() - self.startTime
                progress = self.transferedBytes / self.allBytes * 100
                numChunksLeft = (self.allBytes - self.transferedBytes) / chunkBytes
                timeLeft = (duration * numChunksLeft) / 60
                log.info(", ".join([
                    f"FTP progress: {round(progress, 1)}%",
                    f"Transferred: ({round(self.transferedBytes / 1e6, 1)} / {round(self.allBytes / 1e6, 1)}) MB",
                    f"Time left: {round(timeLeft, 2)} minutes"
                ]))

            self.startTime = time.time()

    def __post_init__(self):
        self.ftp = FTPHost(self.domain, self.username, self.password, self.port)

    @override
    def download(self, path: Path):
        log.info(f"Download: '{path.name}' to '{path}'")
        # Download some files from the login directory.
        self.ftp.download(source=path.name, target=path, callback=self.Progress(size=self.ftp.stat(path.name).st_size))

    @override
    def upload(self, path: Path):
        log.info(f"Upload: '{path}' to '{path.name}'")
        self.ftp.upload(source=path, target=path.name, callback=self.Progress(size=path.stat().st_size))

    @override
    def rename(self, oldPath: Path, newPath: Path):
        log.info(f"Rename: '{oldPath.name}' to '{newPath.name}'")
        self.ftp.rename(source=oldPath.name, target=newPath.name)

    @override
    def delete(self, path: Path):
        log.info(f"Delete: '{path.name}'")
        self.ftp.remove(path=path.name)

    @override
    def copy(self, path: Path, newPath: Path):
        log.info(f"Copy: '{path.name}' to '{newPath.name}'")
        with self.ftp.open(path.name, "rb") as source:
            with self.ftp.open(newPath.name, "wb") as target:
                self.ftp.copyfileobj(source=source, target=target)
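Reviewer note: both implementations sit behind the same FtpService interface, so callers can swap local filesystem staging for a real FTP host without changes. A hedged sketch (not part of this commit); the host, credentials and file path are placeholders.

```python
# Hedged sketch: FsFtpService and FtputilFtpService are interchangeable behind FtpService.
from pathlib import Path

from app.services.FsFtpService import FsFtpService
from app.services.FtputilFtpService import FtputilFtpService
from core.services.FtpService import FtpService

def publish(ftp: FtpService, artifact: Path) -> None:
    # The caller depends only on the interface, not on the transport.
    ftp.upload(path=artifact)

publish(FsFtpService(), Path("/tmp/result.zip"))  # local staging via core.extend.fs
# publish(FtputilFtpService(domain="ftp.example.com", username="user",
#                           password="secret", port="21"),
#         Path("/tmp/result.zip"))               # placeholder credentials
```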
131
admiral-worker/app/services/OsrmRoutingService.py
Normal file
@ -0,0 +1,131 @@
import logging
from random import random

import numpy as np
import requests
from typing_extensions import override

from core.domain.map.GeoLocation import GeoLocation
from core.domain.map.RouteInfo import RouteInfo
from core.domain.map.RouteMatrix import RouteMatrix
from core.domain.optimization.TransportMode import TransportMode
from core.services.RoutingService import RoutingService

log = logging.getLogger(__name__)


class OsrmRoutingService(RoutingService):

    def __init__(self, domain: str):
        self.domain = domain

    def __getCoordinates(self, locations: list[GeoLocation]) -> str:
        coordinates = []
        for location in locations:
            coordinates.append(f"{location.lon},{location.lat}")
        return ";".join(coordinates)

    @override
    def getRouteMatrix(self, geoLocations: list[GeoLocation], transportMode: TransportMode) -> RouteMatrix:
        coordinates = self.__getCoordinates(locations=[gl for gl in geoLocations])
        port, profile = self.__getProfile(transportMode=transportMode)

        res = requests.get(url=f"{self.domain}:{port}/table/v1/{profile}/{coordinates}", params=dict(annotations="distance,duration"))

        if res.status_code != 200:
            raise Exception(f"OSRM routing engine failed to create matrix: {res.text}")

        matrixes = res.json()

        return RouteMatrix.init(
            distances=np.matrix(matrixes['distances'], dtype=np.float32),
            durations=np.matrix(matrixes['durations'], dtype=np.float32)
        )

    @override
    def getRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation]) -> RouteInfo:
        coordinates = self.__getCoordinates(locations=legs)
        port, profile = self.__getProfile(transportMode=transportMode)
        res = requests.get(
            url=f"{self.domain}:{port}/route/v1/{profile}/{coordinates}",
            params=dict(
                geometries="geojson",
                alternatives='false',
                steps='true',
                continue_straight='false',
            )
        )
        if res.status_code != 200:
            raise Exception(f"OSRM routing engine failed to find route: {res.text}")

        data = res.json()
        route = data['routes'][0]

        steps = [legs[0]]
        for i, leg in enumerate(route['legs']):
            legSteps = [legs[i]]
            for step in leg['steps']:
                legSteps += [GeoLocation(lat=c[1], lon=c[0]) for c in step['geometry']['coordinates']]
            legSteps.append(legs[i + 1])
            steps += legSteps

        return RouteInfo(
            distance=route['distance'],
            duration=route['duration'],
            steps=steps
        )

    @override
    def getAverageRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation], probability: list[float], iterations: int) -> RouteInfo:
        averageDistance = 0
        averageDuration = 0
        for i in range(iterations):
            randomLegs: list[GeoLocation] = []
            for leg in legs:
                if random() < probability[legs.index(leg)]:
                    randomLegs.append(leg)
            if len(randomLegs) < 2:
                randomLegs = legs
            routeInfo = self.getRouteInfo(transportMode=transportMode, legs=randomLegs)
            averageDistance += routeInfo.distance
            averageDuration += routeInfo.duration

        return RouteInfo(
            distance=averageDistance / iterations,
            duration=averageDuration / iterations,
            steps=[]
        )

    def __getProfile(self, transportMode: TransportMode) -> tuple[int, str]:
        match transportMode:
            case TransportMode.BIKE:
                return 5000, 'bike'
            case TransportMode.CAR:
                return 5001, 'car'
            case TransportMode.EV:
                return 5002, 'ev'
            case TransportMode.KM:
                return 5003, 'km'
            case TransportMode.KPM:
                return 5004, 'kpm'
            case TransportMode.MK:
                return 5005, 'mk'
            case TransportMode.WALK:
                return 5006, 'walk'
            case _:
                raise Exception(f"Mapping for transport mode does not exist: {transportMode.value}")

    def _getPolyline(self, transportMode: TransportMode, legs: list[GeoLocation]) -> str:
        coordinates = ";".join([f"{l.lon},{l.lat}" for l in legs])
        port, profile = self.__getProfile(transportMode=transportMode)
        res = requests.get(
            url=f"{self.domain}:{port}/route/v1/{profile}/{coordinates}",
            params=dict(
                geometries="polyline",
                alternatives='false',
                steps='true',
                continue_straight='false',
            )
        )
        return res.json()['routes'][0]['geometry']
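Reviewer note: getRouteMatrix is a thin wrapper around OSRM's /table endpoint, with one routed container per transport profile on ports 5000-5006. A hedged sketch (not part of this commit) of the equivalent raw request, assuming an OSRM instance reachable on localhost:5001 with the car profile; the coordinates are arbitrary lon,lat pairs.

```python
# Hedged sketch of the raw OSRM /table call that getRouteMatrix wraps.
import requests

coordinates = ";".join([
    "14.5058,46.0569",  # lon,lat - OSRM expects longitude first
    "14.5125,46.0433",
])
res = requests.get(
    url=f"http://localhost:5001/table/v1/car/{coordinates}",
    params=dict(annotations="distance,duration"),
)
res.raise_for_status()
body = res.json()
print(body["distances"])  # NxN matrix in metres
print(body["durations"])  # NxN matrix in seconds
```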
59
admiral-worker/app/services/PsutilSystemService.py
Normal file
@ -0,0 +1,59 @@
import logging
from dataclasses import dataclass
from typing import Optional
from typing_extensions import override

import psutil
import requests
from urllib3.exceptions import NameResolutionError

from core.services.SystemService import SystemService

log = logging.getLogger(__name__)


@dataclass
class PsutilSystemService(SystemService):
    @override
    def getMaxRamMbAvailable(self) -> float:
        return psutil.virtual_memory().total / 10**6

    @override
    def getIp(self) -> str:
        return requests.get('https://checkip.amazonaws.com').text.strip()

    @override
    def getCpuUtilization(self) -> float:
        return psutil.cpu_percent(interval=None)

    @override
    def getRamMbAvailable(self) -> float:
        return psutil.virtual_memory().available / 10**6

    @override
    def getCpuAvailable(self) -> int:
        count = 0
        for utilization in psutil.cpu_percent(percpu=True):
            if utilization < 25:
                count += 1
        return count

    @override
    def getProcessCpu(self, pid: int = None) -> Optional[float]:
        proc = psutil.Process(pid=pid)
        return proc.cpu_percent(interval=None)

    @override
    def getProcessRam(self, pid: int = None) -> Optional[float]:
        proc = psutil.Process(pid=pid)
        return proc.memory_info().rss / 10**6

    @override
    def killProcess(self, pid: int = None):
        proc = psutil.Process(pid=pid)
        proc.kill()

    @override
    def terminateProcess(self, pid: int = None):
        proc = psutil.Process(pid=pid)
        proc.terminate()
550
admiral-worker/app/services/SolvesallOptimizationService.py
Normal file
@ -0,0 +1,550 @@
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
from typing import Callable, Literal, Optional
|
||||
|
||||
import numpy as np
|
||||
from sklearn.neighbors import BallTree
|
||||
|
||||
from app.algorithms.OrToolsOptimizationService import OrToolsOptimizationVehicle, OrToolsOptimizationPoint, OrToolsOptimizationInstance, \
|
||||
OrToolsOptimizationSolution, OrToolsOptimizationService, OrToolsOptimizationConfig
|
||||
from core.Utils import percentage
|
||||
from core.domain.map.CrnPoint import CrnPoint
|
||||
from core.domain.map.GeoLocation import GeoLocation
|
||||
from core.domain.map.RouteMatrix import RouteMatrix
|
||||
from core.domain.optimization.Optimization import Optimization
|
||||
from core.domain.optimization.OptimizationPoint import OptimizationPoint
|
||||
from core.domain.optimization.OptimizationPointType import OptimizationPointType
|
||||
from core.domain.optimization.OptimizationResultData import OptimizationResultData
|
||||
from core.domain.optimization.OptimizationSolution import OptimizationSolution
|
||||
from core.domain.optimization.OptimizationType import OptimizationType
|
||||
from core.domain.optimization.OptimizationVehicle import OptimizationVehicle
|
||||
from core.domain.optimization.TransportMode import TransportMode
|
||||
from core.services.OptimizationService import OptimizationService
|
||||
from core.types.Logger import Logger
|
||||
|
||||
|
||||
class SolvesallOptimizationService(OptimizationService):
|
||||
|
||||
def config(self, setInitial: bool, district_centering: bool) -> OrToolsOptimizationConfig:
|
||||
return OrToolsOptimizationConfig(
|
||||
district_mode='subsets',
|
||||
district_penalty=0,
|
||||
vehicle_cost=16 * 3600, # Two working days.
|
||||
set_initial=setInitial,
|
||||
useDistrictCentrality=district_centering,
|
||||
)
|
||||
|
||||
def vrpOptimization(
|
||||
self,
|
||||
optimization: Optimization,
|
||||
optimizationVehicles: list[OptimizationVehicle],
|
||||
optimizationPoints: list[OptimizationPoint],
|
||||
routeMatrices: dict[TransportMode, RouteMatrix],
|
||||
solutionCallback: Callable[[int, list[OptimizationSolution], bool, list[OptimizationPoint], Optional[dict[int, float]]], None],
|
||||
terminationCallback: Callable[[], bool],
|
||||
log: Logger,
|
||||
initialOptimizationResultData: Optional[OptimizationResultData] = None
|
||||
):
|
||||
config = self.config(setInitial=initialOptimizationResultData is not None, district_centering=optimization.useDistrictCentrality)
|
||||
crn_initialDistrict: dict[int, str] = {}
|
||||
initialOptimizationPoints: list[OptimizationPoint] = []
|
||||
initialRoutePointBallTree: Optional[BallTree] = None
|
||||
if config.set_initial:
|
||||
log.info('Setting optimization mode to initial solution.')
|
||||
log.info('Creating crn_initialDistrict map and initial optimization points ball tree.')
|
||||
for initialRoute in initialOptimizationResultData.optimizationResult.routes:
|
||||
for initialRoutePoint in initialRoute.points:
|
||||
if initialRoutePoint.crnPoint.hisa != 0:
|
||||
initialOptimizationPoints.append(initialRoutePoint)
|
||||
crn_initialDistrict[initialRoutePoint.crnPoint.hisa] = initialRoute.name
|
||||
initialRoutePointBallTree = BallTree([iop.crnPoint.location.ballVector for iop in initialOptimizationPoints], metric='haversine')
|
||||
|
||||
log.info('Mapping optimization points')
|
||||
orToolsOptimizationPoints: list[OrToolsOptimizationPoint] = []
|
||||
for i, point in enumerate(optimizationPoints):
|
||||
# Construct OrToolsOptimizationPoint list
|
||||
crnPoint = point.crnPoint
|
||||
microLocation = crnPoint.microLocation
|
||||
district = None if crnPoint.district == '' else crnPoint.district
|
||||
orPoint = OrToolsOptimizationPoint(
|
||||
id=i,
|
||||
hisa_id=str(crnPoint.hisa),
|
||||
service_time_sec=int(point.serviceTime.total_seconds()),
|
||||
demand=point.demand,
|
||||
freq=point.visitFrequency,
|
||||
type=self.__crn_type(point.type),
|
||||
lat=microLocation.lat,
|
||||
lon=microLocation.lon,
|
||||
district=district
|
||||
)
|
||||
orToolsOptimizationPoints.append(orPoint)
|
||||
|
||||
if crnPoint.hisa != 0:
|
||||
# Insert additional crn points which do not exist in the initial routes into crn_initialDistrict
|
||||
initialDistrict = crn_initialDistrict.get(point.crnPoint.hisa, None)
|
||||
if initialDistrict is None and config.set_initial:
|
||||
ballVector = GeoLocation(lat=orPoint.lat, lon=orPoint.lon).ballVector
|
||||
nearestInitialPointsIndex = initialRoutePointBallTree.query([ballVector], k=1, return_distance=False)[0][0]
|
||||
nearestInitialCrn = initialOptimizationPoints[nearestInitialPointsIndex].crnPoint.hisa
|
||||
nearestInitialDistrict = crn_initialDistrict[nearestInitialCrn]
|
||||
crn_initialDistrict[crnPoint.hisa] = nearestInitialDistrict
|
||||
log.warning(f"Crn point '{crnPoint.hisa}' is missing in initial routes, nearest crn district: {nearestInitialDistrict}")
|
||||
|
||||
# Log first 10 points
|
||||
if i < 10:
|
||||
log.info(orPoint)
|
||||
|
||||
log.info('Mapping optimization vehicles')
|
||||
orToolsOptimizationVehicles: list[OrToolsOptimizationVehicle] = []
|
||||
optimizationVehicleAll: list[OptimizationVehicle] = []
|
||||
tempOrVehicleIndex_district: dict[int, str] = {}
|
||||
orVehicleIndex_district: dict[int, str] = {}
|
||||
for vehicle in optimizationVehicles:
|
||||
districts = vehicle.districts.split(",")
|
||||
for i in range(vehicle.maxQuantity):
|
||||
orVehicle = OrToolsOptimizationVehicle(
|
||||
id=len(orToolsOptimizationVehicles),
|
||||
name=vehicle.name,
|
||||
route_type=self.__route_type(vehicle.type),
|
||||
capacity=vehicle.capacity,
|
||||
range_km=vehicle.range / 1000,
|
||||
working_time_h=vehicle.deliveryTime,
|
||||
priority=i < vehicle.minQuantity,
|
||||
districts=vehicle.districts.split(",")
|
||||
)
|
||||
|
||||
# Assign district to vehicle
|
||||
if len(districts) > 0:
|
||||
district = districts.pop(0)
|
||||
tempOrVehicleIndex_district[orVehicle.id] = district
|
||||
orVehicleIndex_district[orVehicle.id] = district
|
||||
|
||||
log.info(orVehicle)
|
||||
optimizationVehicleAll.append(vehicle)
|
||||
orToolsOptimizationVehicles.append(orVehicle)
|
||||
|
||||
# TODO: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! On backend get initial district from crn if no confirmed optimization allready exists otherwise get initial district from last confirmed optimization.
|
||||
initialRoutes: list[list[CrnPoint]] = []
|
||||
if config.set_initial:
|
||||
log.info("Construct initial routes for vehicles")
|
||||
for i, vehicle in enumerate(orToolsOptimizationVehicles):
|
||||
vehicleDistrict = tempOrVehicleIndex_district.pop(i, None)
|
||||
initialRoutes.append([])
|
||||
if vehicleDistrict is None:
|
||||
continue
|
||||
for route in initialOptimizationResultData.optimizationResult.routes:
|
||||
if route.name == vehicleDistrict:
|
||||
for routePoint in route.points:
|
||||
if routePoint.crnPoint.hisa != 0:
|
||||
initialRoutes[-1].append(routePoint.crnPoint)
|
||||
if len(initialRoutes[-1]) > 0:
|
||||
route = [ir.hisa for ir in initialRoutes[-1]]
|
||||
log.info([
|
||||
f"{i}. {vehicleDistrict}.{vehicle.name}[{len(route)}]:",
|
||||
f"{route[0]}", f"->", f"{route[-1]}", "...", f"{route}"
|
||||
])
|
||||
|
||||
log.info('Mapping optimization matrices')
|
||||
time_matrix: dict[str, np.ndarray] = {}
|
||||
distance_matrix: dict[str, np.ndarray] = {}
|
||||
for vehicle_type, routeMatrix in routeMatrices.items():
|
||||
vehicle_type_name = self.__route_type(type=vehicle_type)
|
||||
time_matrix[vehicle_type_name] = routeMatrix.durationMatrix()
|
||||
distance_matrix[vehicle_type_name] = routeMatrix.distanceMatrix()
|
||||
|
||||
# Creating configuration for optimization
|
||||
orToolsInstance = OrToolsOptimizationInstance(
|
||||
vehicles=orToolsOptimizationVehicles,
|
||||
points=orToolsOptimizationPoints,
|
||||
distance_matrix=distance_matrix,
|
||||
time_matrix=time_matrix,
|
||||
initial_routes=[], # <---------------------------- SET THIS LATER!!!
|
||||
district_percentage=optimization.weight / 100,
|
||||
log=log
|
||||
)
|
||||
|
||||
log.info(f"Use unvisited crns: {optimization.useUnvisitedCrn}")
|
||||
unvisitedOptimizationPoints: list[OptimizationPoint] = []
|
||||
if not optimization.useUnvisitedCrn:
|
||||
visitedOptimizationPoints = list(filter(lambda op: op.isVisited, optimizationPoints))
|
||||
unvisitedOptimizationPoints = list(filter(lambda op: not op.isVisited, optimizationPoints))
|
||||
log.warning(f"Unvisited crns[{len(unvisitedOptimizationPoints)}]: {percentage(unvisitedOptimizationPoints, optimizationPoints)}%")
|
||||
orToolsInstance = self.__filteredOrToolsInstance(orToolsInstance=orToolsInstance, optimizationPoints=visitedOptimizationPoints)
|
||||
initialRoutes = self.__checkAndBalanceInitialRoutes(initialRoutes=initialRoutes, optimizationPoints=optimizationPoints, log=log)
|
||||
initialRoutes = self.__filterInitialRoutes(initialRoutes=initialRoutes, optimizationPoints=visitedOptimizationPoints)
|
||||
|
||||
log.info("Put initial route crn indexes to initial routes as is their place in optimization points list")
|
||||
crn_optimizationPointIndex: dict[int, int] = {}
|
||||
for i, op in enumerate(orToolsInstance.points):
|
||||
hisa = int(op.hisa_id)
|
||||
if hisa != 0:
|
||||
crn_optimizationPointIndex[hisa] = i
|
||||
|
||||
log.info("Set initial routes")
|
||||
orToolsInstance.initial_routes = [[crn_optimizationPointIndex[crnPoint.hisa] for crnPoint in route] for route in initialRoutes]
|
||||
|
||||
# Stop callback
|
||||
def stop_callback_fn() -> bool:
|
||||
return terminationCallback()
|
||||
|
||||
# Solution callback
|
||||
def solution_callback_fn(objective: int, solution: list[OrToolsOptimizationSolution], finished: bool, overlapping: dict[int, float] | None):
|
||||
mappedSolution = []
|
||||
for os in solution:
|
||||
optimizationVehicle = optimizationVehicleAll[os.vehicle_id]
|
||||
|
||||
district = None
|
||||
if optimization.weight > 0:
|
||||
district = os.district if os.district is not None else orVehicleIndex_district.get(os.vehicle_id, None)
|
||||
|
||||
kwargs = dict(
|
||||
isExtra=os.dummy,
|
||||
optimizationVehicleId=optimizationVehicle.id,
|
||||
hise=[int(hi) for hi in os.hisa_ids],
|
||||
distance=os.distance,
|
||||
duration=os.duration,
|
||||
district=district,
|
||||
cost=os.cost
|
||||
)
|
||||
mappedSolution.append(OptimizationSolution(**kwargs))
|
||||
|
||||
solutionCallback(objective, mappedSolution, finished, unvisitedOptimizationPoints, overlapping)
|
||||
|
||||
if optimization.type == OptimizationType.INITIAL:
|
||||
return self.__generateInitialSolution(
|
||||
solvingTime=optimization.optimizationTime,
|
||||
orToolsInstance=orToolsInstance,
|
||||
solutionCallback=solution_callback_fn,
|
||||
district_centrality=optimization.useDistrictCentrality,
|
||||
log=log,
|
||||
)
|
||||
elif optimization.type == OptimizationType.TEST:
|
||||
return self.__generateTestSolution(
|
||||
testingOptimizationPoints=self.__filterOptimizationPoints(
|
||||
optimization=optimization, optimizationPoints=optimizationPoints, log=log),
|
||||
solvingTime=optimization.optimizationTime,
|
||||
orToolsInstance=orToolsInstance,
|
||||
solutionCallback=solution_callback_fn,
|
||||
log=log,
|
||||
district_centrality=optimization.useDistrictCentrality,
|
||||
)
|
||||
|
||||
# Starting optimization and getting final solution
|
||||
objective, finalSolution, overlapping = OrToolsOptimizationService().vrpOptimization(
|
||||
solving_time_sec=int(optimization.optimizationTime.total_seconds()),
|
||||
instance=orToolsInstance,
|
||||
config=config,
|
||||
solution_callback_fn=lambda obj, sol, over: solution_callback_fn(objective=obj, solution=sol, finished=False, overlapping=over),
|
||||
stop_callback_fn=stop_callback_fn,
|
||||
log=log
|
||||
)
|
||||
|
||||
solution_callback_fn(objective=objective, solution=finalSolution, finished=True, overlapping=overlapping)
|
||||
|
||||
def __filterInitialRoutes(self, initialRoutes: list[list[CrnPoint]], optimizationPoints: list[OptimizationPoint]) -> list[list[CrnPoint]]:
|
||||
""" Filter initial crns that are present inside optimization points """
|
||||
allowedHise = [op.crnPoint.hisa for op in optimizationPoints]
|
||||
filteredInitialRoutes = []
|
||||
for route in initialRoutes:
|
||||
filteredInitialRoute = []
|
||||
for crnPoint in route:
|
||||
if crnPoint.hisa in allowedHise:
|
||||
filteredInitialRoute.append(crnPoint)
|
||||
filteredInitialRoutes.append(filteredInitialRoute)
|
||||
return filteredInitialRoutes
|
||||
|
||||
def __checkAndBalanceInitialRoutes(
|
||||
self, initialRoutes: list[list[CrnPoint]], optimizationPoints: list[OptimizationPoint], log: Logger
|
||||
) -> list[list[CrnPoint]]:
|
||||
if len(initialRoutes) == 0:
|
||||
return []
|
||||
|
||||
""" Add missing initial crn points, remove not needed crn points """
|
||||
log.warning("Start balancing initial routes")
|
||||
|
||||
log.info("Create crn mapping with optimization points as priority")
|
||||
hisa_crn: dict[int, CrnPoint] = {}
|
||||
hisa_initial_district: dict[int, int] = {}
|
||||
for district, initialRoute in enumerate(initialRoutes):
|
||||
for ip in initialRoute:
|
||||
hisa_crn[ip.hisa] = ip
|
||||
hisa_initial_district[ip.hisa] = district
|
||||
for op in optimizationPoints:
|
||||
hisa_crn[op.crnPoint.hisa] = op.crnPoint
|
||||
|
||||
log.info("Get all initial crns")
|
||||
initialHise = []
|
||||
for initialRoute in initialRoutes:
|
||||
for ip in initialRoute:
|
||||
initialHise.append(ip.hisa)
|
||||
|
||||
log.info("Get all optimization crns")
|
||||
optimizationHise = {op.crnPoint.hisa for op in optimizationPoints}
|
||||
uniqueInitialHise = set(initialHise)
|
||||
|
||||
# Check for duplicates
|
||||
if len(uniqueInitialHise) != len(initialHise):
|
||||
Exception(f"Initial routes contains duplicates: {[k for (k, v) in Counter(initialHise).items() if v > 1]} ")
|
||||
if len(optimizationHise) != len(optimizationPoints):
|
||||
opHise = [op.crnPoint.hisa for op in optimizationPoints]
|
||||
raise Exception(f"Optimization points contains duplicates: {[k for (k, v) in Counter(opHise).items() if v > 1]} ")
|
||||
|
||||
allCrns = list(hisa_crn.values())
|
||||
allCrnLocations = [crn.location.ballVector for crn in allCrns]
|
||||
crnBallTree = BallTree(allCrnLocations, metric='haversine')
|
||||
|
||||
missingInitialHise = optimizationHise - uniqueInitialHise
|
||||
notUsedInitialHise = uniqueInitialHise - optimizationHise
|
||||
|
||||
if len(missingInitialHise) > 0:
|
||||
log.warning(f"Missing initial crns: {len(missingInitialHise)}: {missingInitialHise}")
|
||||
if len(notUsedInitialHise) > 0:
|
||||
log.warning(f"Not used initial crns: {len(notUsedInitialHise)}: {notUsedInitialHise}")
|
||||
|
||||
# Insert missing crns to initial routes
|
||||
log.info("Insert missing crns to initial routes")
|
||||
for mih in missingInitialHise:
|
||||
if mih == 0: # DO NOT INSERT POST OFFICE TO INITIAL ROUTES!!!!!!!!!!!!!!!
|
||||
continue
|
||||
missingCrn = hisa_crn[mih]
|
||||
closestCrnIndexes = crnBallTree.query([missingCrn.location.ballVector], k=int(len(optimizationPoints) / 2), return_distance=False)[0][:1]
|
||||
|
||||
# Find to which district we can insert missing district
|
||||
inserted = False
|
||||
for closestCrnIndex in closestCrnIndexes:
|
||||
closestCrn = allCrns[closestCrnIndex]
|
||||
# We found closest crn that exists in initial districts we know where to insert it...
|
||||
if closestCrn.hisa in hisa_initial_district:
|
||||
closestCrnDistrict = hisa_initial_district[closestCrn.hisa]
|
||||
initialRoutes[closestCrnDistrict].append(missingCrn)
|
||||
inserted = True
|
||||
break
|
||||
|
||||
# If the crn could not be inserted, fall back to the first initial route
|
||||
if not inserted:
|
||||
initialRoutes[0].append(missingCrn)
|
||||
|
||||
# Remove not used initial crns
|
||||
for nuih in notUsedInitialHise:
|
||||
notUsedCrn = hisa_crn[nuih]
|
||||
notUsedCrnDistrict = hisa_initial_district[nuih]
|
||||
initialRoutes[notUsedCrnDistrict].remove(notUsedCrn)
|
||||
|
||||
return initialRoutes
|
||||
|
||||
def __route_type(self, type: TransportMode) -> str:
|
||||
match type:
|
||||
case TransportMode.BIKE:
|
||||
return 'bike'
|
||||
case TransportMode.CAR:
|
||||
return 'car'
|
||||
case TransportMode.EV:
|
||||
return 'ev'
|
||||
case TransportMode.KM:
|
||||
return 'km'
|
||||
case TransportMode.KPM:
|
||||
return 'kpm'
|
||||
case TransportMode.MK:
|
||||
return 'mk'
|
||||
case TransportMode.WALK:
|
||||
return 'foot'
|
||||
case _:
|
||||
raise TypeError(f"Mapping for transport mode does not exists: {type}")
|
||||
|
||||
def __crn_type(self, type: OptimizationPointType) -> Literal['crn', 'depot', 'refill']:
|
||||
if type == OptimizationPointType.CRN:
|
||||
return 'crn'
|
||||
elif type == OptimizationPointType.POSTA:
|
||||
return 'depot'
|
||||
elif type == OptimizationPointType.DOSTAVNIK:
|
||||
return 'refill'
|
||||
|
||||
raise TypeError(f"CRN type '{type}' currently not supported!")
|
||||
|
||||
def __filteredOrToolsInstance(
|
||||
self, orToolsInstance: OrToolsOptimizationInstance, optimizationPoints: list[OptimizationPoint]
|
||||
) -> OrToolsOptimizationInstance:
|
||||
|
||||
depotOptPoint = orToolsInstance.points[0]
|
||||
crnOptPoints = orToolsInstance.points[1:]
|
||||
filteredHisaIds = [tOptP.crnPoint.hisa for tOptP in optimizationPoints]
|
||||
|
||||
optPointIndexes = [0]
|
||||
newOtimizationPoints = [depotOptPoint] # Depot must be on the first index!!!!!!!!!!!
|
||||
|
||||
# Fetch district optimization points and indexes for generating matrixes
|
||||
for i, crnOptPoint in enumerate(crnOptPoints):
|
||||
if int(crnOptPoint.hisa_id) in filteredHisaIds:
|
||||
optPointIndexes.append(i + 1)
|
||||
newOtimizationPoints.append(crnOptPoint)
|
||||
|
||||
# Reset index to match new distance and time matrix
|
||||
for i, optPoint in enumerate(newOtimizationPoints):
|
||||
optPoint.id = i
|
||||
|
||||
# Generate new distance matrices
|
||||
distance_matrix: dict[str, np.ndarray] = {}
|
||||
for vehicleType, matrix in orToolsInstance.distance_matrix.items():
|
||||
distance_matrix[vehicleType] = matrix[np.ix_(optPointIndexes, optPointIndexes)]
|
||||
|
||||
# Generate new time matrices
|
||||
time_matrix: dict[str, np.ndarray] = {}
|
||||
for vehicleType, matrix in orToolsInstance.time_matrix.items():
|
||||
time_matrix[vehicleType] = matrix[np.ix_(optPointIndexes, optPointIndexes)]
|
||||
|
||||
orToolsInstance.points = newOtimizationPoints
|
||||
orToolsInstance.distance_matrix = distance_matrix
|
||||
orToolsInstance.time_matrix = time_matrix
|
||||
|
||||
return orToolsInstance
|
||||
|
||||
def __generateTestSolution(
|
||||
self, solvingTime: timedelta,
|
||||
testingOptimizationPoints: list[OptimizationPoint],
|
||||
orToolsInstance: OrToolsOptimizationInstance,
|
||||
solutionCallback: Callable[[int, list[OrToolsOptimizationSolution], bool, Optional[dict[int, float]]], None],
|
||||
log: Logger,
|
||||
district_centrality: bool
|
||||
):
|
||||
log.info("Generating test solution")
|
||||
|
||||
orToolsInstance = self.__filteredOrToolsInstance(orToolsInstance=orToolsInstance, optimizationPoints=testingOptimizationPoints)
|
||||
|
||||
# Starting optimization and getting final solution
|
||||
objective, solution, overlapping = OrToolsOptimizationService().vrpOptimization(
|
||||
solving_time_sec=int(solvingTime.total_seconds()),
|
||||
instance=orToolsInstance,
|
||||
config=self.config(setInitial=False, district_centering=district_centrality),
|
||||
log=log
|
||||
)
|
||||
|
||||
solutionCallback(objective, solution, True, overlapping)
|
||||
|
||||
def __generateInitialSolution(
|
||||
self, solvingTime: timedelta, orToolsInstance: OrToolsOptimizationInstance,
|
||||
solutionCallback: Callable[[int, list[OrToolsOptimizationSolution], bool, Optional[dict[int, float]]], None],
|
||||
log: Logger,
|
||||
district_centrality: bool
|
||||
|
||||
):
|
||||
log.info("Generating initial solution")
|
||||
|
||||
# Remove vehicles constraints
|
||||
for vehicle in orToolsInstance.vehicles:
|
||||
vehicle.working_time_h = 1e3
|
||||
vehicle.range_km = 1e3
|
||||
vehicle.capacity = 1e3
|
||||
|
||||
depotOptPoint = orToolsInstance.points[0]
|
||||
crnOptPoints = orToolsInstance.points[1:]
|
||||
|
||||
districts = set([optPoint.district for optPoint in crnOptPoints]) # Depot is on the first index!!!!!!!!!
|
||||
solvingTimeSec = int((solvingTime / len(districts)).total_seconds())
|
||||
combinedSolutions = []
|
||||
combinedObjective = 0
|
||||
|
||||
for districtI, district in enumerate(sorted(list(districts))):
|
||||
log.info(f"Optimizing district[{districtI}/{len(districts)}] = '{district}'")
|
||||
|
||||
log.info(f"Searching for appropriate vehicles for district '{district}'")
|
||||
districtVehicles = []
|
||||
for vehicle in orToolsInstance.vehicles:
|
||||
if district in vehicle.districts:
|
||||
log.info(f"Found vehicle: {vehicle}")
|
||||
districtVehicles.append(vehicle)
|
||||
|
||||
districtVehicles = districtVehicles[:1]
|
||||
log.info(f"Force one vehicle for district '{district}': {districtVehicles}")
|
||||
|
||||
if len(districtVehicles) == 0:
|
||||
log.warning(f"No vehicles found for district '{district}' (using any free vehicle that has no district assigned) instead")
|
||||
districtVehicles = [vehicle for vehicle in orToolsInstance.vehicles if len(vehicle.districts) == 0]
|
||||
|
||||
districtOptPointIndexes = [0]
|
||||
districtOptPoints = [depotOptPoint] # Depot must be on the first index!!!!!!!!!!!
|
||||
|
||||
# Fetch district optimization points and indexes for generating matrixes
|
||||
for crnI, crnOptPoint in enumerate(crnOptPoints):
|
||||
if crnOptPoint.district == district:
|
||||
districtOptPointIndexes.append(crnI + 1)
|
||||
districtOptPoints.append(crnOptPoint)
|
||||
elif crnOptPoint.district not in districts:
|
||||
log.warning(f"CRN without district: {crnOptPoint}")
|
||||
|
||||
# Reset index to match new distance and time matrix
|
||||
for optPointI, optPoint in enumerate(districtOptPoints):
|
||||
optPoint.id = optPointI
|
||||
|
||||
# Generate new distance matrices
|
||||
district_distance_matrix: dict[str, np.ndarray] = {}
|
||||
for vehicleType, matrix in orToolsInstance.distance_matrix.items():
|
||||
district_distance_matrix[vehicleType] = matrix[np.ix_(districtOptPointIndexes, districtOptPointIndexes)]
|
||||
|
||||
# Generate new time matrices
|
||||
district_time_matrix: dict[str, np.ndarray] = {}
|
||||
for vehicleType, matrix in orToolsInstance.distance_matrix.items():
|
||||
district_time_matrix[vehicleType] = matrix[np.ix_(districtOptPointIndexes, districtOptPointIndexes)]
|
||||
|
||||
districtOrToolsInstance = OrToolsOptimizationInstance(
|
||||
vehicles=districtVehicles,
|
||||
points=districtOptPoints,
|
||||
distance_matrix=district_distance_matrix,
|
||||
time_matrix=district_time_matrix,
|
||||
initial_routes=[[]],
|
||||
log=log
|
||||
)
|
||||
|
||||
# Starting optimization and getting final solution
|
||||
objective, districtSolutions, overlapping = OrToolsOptimizationService().vrpOptimization(
|
||||
solving_time_sec=solvingTimeSec,
|
||||
instance=districtOrToolsInstance,
|
||||
config=self.config(setInitial=False, district_centering=district_centrality),
|
||||
log=log
|
||||
)
|
||||
|
||||
numOfDistrictSolutions = len(districtSolutions)
|
||||
if numOfDistrictSolutions != 1:
|
||||
raise Exception(f"Solution for one district should have one solution but instead has: {numOfDistrictSolutions}")
|
||||
|
||||
for solution in districtSolutions:
|
||||
solution.vehicle_id = districtVehicles[solution.vehicle_id].id
|
||||
solution.district = district
|
||||
combinedSolutions.append(solution)
|
||||
combinedObjective += objective
|
||||
|
||||
solutionCallback(objective, combinedSolutions, False, None)
|
||||
|
||||
solutionCallback(combinedObjective, combinedSolutions, True, None)
|
||||
|
||||
def __filterOptimizationPoints(
|
||||
self, optimization: Optimization, optimizationPoints: list[OptimizationPoint], log: Logger
|
||||
) -> list[OptimizationPoint]:
|
||||
|
||||
optPoints = []
|
||||
|
||||
titleInfo = optimization.title.split()
|
||||
log.info(f"Optimization parameters: {titleInfo}")
|
||||
|
||||
match titleInfo[0]:
|
||||
case "RADIUS":
|
||||
radius = float(titleInfo[1])
|
||||
depot = optimizationPoints[0]
|
||||
for optPoint in optimizationPoints[1:]:
|
||||
if depot.crnPoint.location.distance(optPoint.crnPoint.location) < radius:
|
||||
optPoints.append(optPoint)
|
||||
case "SQUARE":
|
||||
lats = [float(titleInfo[1]), float(titleInfo[3])]
|
||||
lons = [float(titleInfo[2]), float(titleInfo[4])]
|
||||
for optPoint in optimizationPoints[1:]:
|
||||
if lats[0] < optPoint.crnPoint.location.lat < lats[1] and lons[0] < optPoint.crnPoint.location.lon < lons[1]:
|
||||
optPoints.append(optPoint)
|
||||
case "STREET":
|
||||
streetName = titleInfo[1]
|
||||
for optPoint in optimizationPoints[1:]:
|
||||
if streetName in optPoint.crnPoint.naslov:
|
||||
optPoints.append(optPoint)
|
||||
case _:
|
||||
raise Exception(f"Unknown testing category '{titleInfo[0]}'")
|
||||
|
||||
log.info(f"Testing optimization points: {len(optPoints)}")
|
||||
return optPoints
|
39
admiral-worker/buildSrc/common.mk
Normal file
@ -0,0 +1,39 @@
VIRTUAL_ENV = venv
PATH := $(VIRTUAL_ENV)/bin:$(PATH)
PWD=$(shell pwd)
VERSION=$(shell grep "^project_version=" "../posta-poi-app-backend/project.properties" | cut -d'=' -f2)

.DEFAULT_GOAL := help

PACKAGE = mylinux
NOW_DATE=`date +"%Y-%m-%d %H:%M:%S"`

define BROWSER_PYSCRIPT
import os, webbrowser, sys
try:
    from urllib import pathname2url
except:
    from urllib.request import pathname2url
webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
endef
export BROWSER_PYSCRIPT
BROWSER := python -c "$$BROWSER_PYSCRIPT"


define PRINT_HELP_PYSCRIPT
import re, sys
for line in sys.stdin:
    match = re.match(r'^### (.*) #', line)
    if match:
        target = match.groups()[0]
        print("\n%s" % (target))
    match = re.match(r'^([a-zA-Z0-9_-]+):.*?## (.*)$$', line)
    if match:
        target, help = match.groups()
        print(" %-15s %s" % (target, help))
endef
export PRINT_HELP_PYSCRIPT


help:
	@python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
58
admiral-worker/cli/run_optimizationResults_migrations.py
Normal file
@ -0,0 +1,58 @@
from collections import defaultdict
from uuid import UUID

from app.App import App
from core.domain.map.CrnMicroUpdate import CrnMicroUpdateState, CrnMicroUpdate
from core.domain.map.CrnPoint import CrnPoint
from core.extend.logger import Progress
from core.types.Id import Id

App.init()

# ids = App.repos.optimizationResultRepo.getAllIds()
microLocations = App.repos.crnMicroUpdateRepo.getAllByState(state=CrnMicroUpdateState.POTRJENO)
ids = [Id(value=UUID('70faa8ef-83da-4b5f-9e76-84d1d5acb995'))]
posta_hisa_crn: dict[int, dict[int, tuple[CrnPoint, CrnMicroUpdate]]] = defaultdict(dict)
hisa_micro: dict[int, CrnMicroUpdate] = {}

for m in sorted(microLocations, key=lambda x: x.createdAt):
    hisa_micro[m.hisa] = m

progress = Progress("Optimization result migration", ids)
for i, id in enumerate(ids):
    optimizationResult = App.repos.optimizationResultRepo.get(id=id)
    opt = App.repos.optimizationRepo.get(id=optimizationResult.optimizationId)
    posta = opt.posta.value
    postOffice = App.repos.postOfficeRepo.get(posta=posta)
    if posta not in posta_hisa_crn:
        posta_hisa_crn[posta] = {crn.hisa: crn for crn in App.services.postaService.getCrnPoints(posta=posta)}
    hisa_crn = posta_hisa_crn[posta]
    for ri, route in enumerate(optimizationResult.routes):
        for pi, point in enumerate(route.points):
            point.crnPoint.attr.dostopnost = 'Kolo'
            point.crnPoint.attr.namen = None
            point.crnPoint.attr.nevarenPes = None
            point.crnPoint.attr.dostopenNabiralnik = None

            mikro = hisa_micro.get(point.crnPoint.hisa, None)
            if mikro is not None:
                point.crnPoint.attr.mikroLat = mikro.lat
                point.crnPoint.attr.mikroLon = mikro.lon

            optimizationResult.routes[ri].points[pi] = point

    for pi, point in enumerate(optimizationResult.unvisited):
        point.crnPoint.attr.dostopnost = 'Kolo'
        point.crnPoint.attr.namen = None
        point.crnPoint.attr.nevarenPes = None
        point.crnPoint.attr.dostopenNabiralnik = None

        mikro = hisa_micro.get(point.crnPoint.hisa, None)
        if mikro is not None:
            point.crnPoint.attr.mikroLat = mikro.lat
            point.crnPoint.attr.mikroLon = mikro.lon

        optimizationResult.unvisited[pi] = point

    App.repos.optimizationResultRepo.post(optimizationResult)
    progress.log(i, f"https://optdos.posta.si/optimizations/{opt.id.value}/result/{optimizationResult.id.value}")
12
admiral-worker/cli/run_optimization_worker.py
Normal file
@ -0,0 +1,12 @@
from dotenv import load_dotenv
load_dotenv()
from core.extend.logger import logFile

print(f"Logfile: {logFile}")
from app.App import App


if __name__ == '__main__':
    App.init()
    App.usecases.run_optimization_worker.now()
11
admiral-worker/core/Env.py
Normal file
@ -0,0 +1,11 @@
import os

from dotenv import load_dotenv

load_dotenv()


class Env:
    LOGGING_DIR: str = os.getenv("LOGGING_DIR")
    KEYSET_ENCRYPTED: str = os.getenv("KEYSET_ENCRYPTED")
    FTP_PATH: str = os.getenv("FTP_PATH")
125
admiral-worker/core/Utils.py
Normal file
@ -0,0 +1,125 @@
import hashlib
import json
from collections import Counter
from dataclasses import fields
from datetime import timedelta, datetime, date
from enum import Enum
from pathlib import Path
from typing import Optional

import pytz
from pypolyline.cutil import decode_polyline
from pytz import tzinfo

from core.domain.map.GeoLocation import GeoLocation
from core.types.Id import Id
from core.types.IntId import IntId


def hash(o: str, size: Optional[int] = None) -> str:
    # TODO return this!: hash = hashlib.sha1(o.encode()).hexdigest()
    # if size is not None:
    #     hash = hash[-size:]
    return o


def fileHash(path: Path, size: Optional[int] = None) -> str:
    with path.open('r', encoding='utf-8') as f:
        text = f.read()
    hash = hashlib.sha1(text.encode()).hexdigest()
    if size is not None:
        hash = hash[-size:]
    return hash


def chunks(data, size):
    for i in range(0, len(data), size):
        yield data[i:i + size]


def hash8(o: str) -> str:
    return hash(o, size=8)


def encode(o: any):
    if isinstance(o, Id):
        return str(o.value)
    if isinstance(o, IntId):
        return int(o.value)
    if isinstance(o, timedelta):
        return int(o.total_seconds())
    if isinstance(o, datetime):
        return int(o.timestamp())
    if isinstance(o, date):
        return int(datetime.combine(o, datetime.min.time()).timestamp())
    if isinstance(o, Enum):
        return o.value
    if isinstance(o, set):
        return list(o)
    if isinstance(o, list):
        return o
    return o.__dict__


def json_dump(o: any, f):
    json.dump(obj=o, fp=f, default=encode)


def json_dumps(o: any) -> str:
    return json.dumps(obj=o, default=encode)


def json_loads(o: any) -> str:
    return json.loads(o)


def current_datetime_str() -> str:
    return datetime.now().isoformat().replace(':', '_')


def polyline_decode(data: str) -> list[GeoLocation]:
    return [GeoLocation(lon=arr[0], lat=arr[1]) for arr in decode_polyline(data.encode(), 6)]


def datetimeRange(start: datetime, end: datetime, step: timedelta) -> list[datetime]:
    dates = []
    currentDay = start
    while currentDay <= end:
        dates.append(currentDay)
        currentDay += step

    return dates


def saveDivision(a: float, b: float, default: float = None) -> Optional[float]:
    return a / b if b else default


def percentage(a: list | int, b: list | int) -> Optional[float]:
    aNum = len(a) if isinstance(a, list) else a
    bNum = len(b) if isinstance(b, list) else b
    if bNum == 0:
        return None
    return round(aNum / bNum * 100, 2)


def fromLocalToUtc(dt: datetime, localTimezone: tzinfo) -> datetime:
    utc = localTimezone.localize(dt).astimezone(pytz.utc)
    return datetime(year=utc.year, month=utc.month, day=utc.day, hour=utc.hour, minute=utc.minute, second=utc.second, microsecond=utc.microsecond)


def dateRange(start: date, end: date) -> set[date]:
    return set([start + timedelta(days=x) for x in range((end - start).days)] + [end])


def initDataclass(cls: any, **kwargs):
    valid_field_names = {field.name for field in fields(cls)}
    return cls(**{k: v for k, v in kwargs.items() if k in valid_field_names})


# Find legs of optimization result
def countDuplicates(arr):
    # Count occurrences of each number
    counts = Counter(arr)
    # Filter numbers that occur more than once
    duplicates = [(num, count) for num, count in counts.items() if count > 1]
    return duplicates
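Reviewer note: encode() is the `default` hook handed to json.dump/json.dumps, so domain values (Id, IntId, timedelta, datetime, enums, dataclasses) serialize without per-type code. A hedged sketch (not part of this commit) with a throwaway dataclass:

```python
# Hedged sketch of encode() acting as the json `default` hook.
from dataclasses import dataclass
from datetime import timedelta

from core.Utils import json_dumps

@dataclass
class Example:  # hypothetical type, for illustration only
    name: str
    serviceTime: timedelta

# timedelta is converted to whole seconds; unknown objects fall back to __dict__.
print(json_dumps(Example(name="depot", serviceTime=timedelta(minutes=2))))
# expected: {"name": "depot", "serviceTime": 120}
```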
46
admiral-worker/core/Var.py
Normal file
@ -0,0 +1,46 @@
import pytz

from core import Utils
from core.domain.map.GpsSession import GpsSession
from core.domain.optimization.TransportMode import TransportMode


class Var:
    localTimezone = pytz.timezone("Europe/Ljubljana")

    @staticmethod
    def vrpOptimizationFileName() -> str:
        return f"vrpOptimization_{Utils.current_datetime_str()}.bin"

    @staticmethod
    def optimizationFilesZipName(posta: int) -> str:
        return f"{posta}_optimizationFiles.zip"

    @staticmethod
    def download_optimizationFilesZipName(posta: int) -> str:
        timestamp = Utils.current_datetime_str()
        return f"{timestamp}/download_{posta}_optimizationFiles_{timestamp}.zip"

    @staticmethod
    def upload_optimizationFilesZipName(posta: int) -> str:
        return f"upload_{posta}_optimizationFiles_{Utils.current_datetime_str()}.zip"

    @staticmethod
    def optimizationPointsFileName(posta: int) -> str:
        return f"{posta}_optimizationPoints.json"

    @staticmethod
    def gpsFileName(posta: int) -> str:
        return f"{posta}_gps.csv"

    @staticmethod
    def gpsSessionFileName(gpsSession: GpsSession) -> str:
        return f"{gpsSession.id.value}_gpsSession_gps.csv"

    @staticmethod
    def distanceRouteMatrixFileName(posta: int, transportMode: TransportMode) -> str:
        return f"{posta}_{transportMode.value}_distance_matrix.npy"

    @staticmethod
    def durationRouteMatrixFileName(posta: int, transportMode: TransportMode) -> str:
        return f"{posta}_{transportMode.value}_duration_matrix.npy"
14
admiral-worker/core/domain/Delivery.py
Normal file
@ -0,0 +1,14 @@
from dataclasses import dataclass
from datetime import timedelta

from core.domain.map.CrnAttributes import CrnAttributes
from core.domain.map.GeoLocation import GeoLocation


@dataclass(eq=True, frozen=False)
class Package:
    id: str
    pickup: GeoLocation
    dropof: GeoLocation
    weight: float
    volume: float
26
admiral-worker/core/domain/GeoLocation.py
Normal file
@ -0,0 +1,26 @@
from dataclasses import dataclass
from math import cos, asin, sqrt, pi
from math import radians

from typing_extensions import Self


@dataclass(eq=True, frozen=True)
class GeoLocation:
    lat: float
    lon: float

    @property
    def ballVector(self) -> tuple[float, float]:
        return radians(self.lat), radians(self.lon)

    @property
    def vector(self) -> tuple[float, float]:
        return self.lat, self.lon

    def distance(self, geoLocation: Self) -> float:
        r = 6371000  # m
        p = pi / 180

        a = 0.5 - cos((geoLocation.lat-self.lat)*p)/2 + cos(self.lat*p) * cos(geoLocation.lat*p) * (1-cos((geoLocation.lon-self.lon)*p))/2
        return 2 * r * asin(sqrt(a))
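Reviewer note: distance() is the haversine great-circle formula with an Earth radius of 6,371,000 m, so it returns metres. A hedged sanity check (not part of this commit); the two coordinates are illustrative city locations.

```python
# Hedged sanity check of GeoLocation.distance (haversine, result in metres).
from core.domain.GeoLocation import GeoLocation

ljubljana = GeoLocation(lat=46.0569, lon=14.5058)
maribor = GeoLocation(lat=46.5547, lon=15.6459)

# Roughly 100 km apart as the crow flies.
print(round(ljubljana.distance(maribor) / 1000, 1), "km")
```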
10
admiral-worker/core/domain/RouteInfo.py
Normal file
@ -0,0 +1,10 @@
from dataclasses import dataclass, field

from core.domain.map.GeoLocation import GeoLocation


@dataclass
class RouteInfo:
    distance: float  # Kilometers
    duration: float  # Seconds
    steps: list[GeoLocation] = field(default_factory=list)
32
admiral-worker/core/domain/optimization/Optimization.py
Normal file
@ -0,0 +1,32 @@
import datetime
from dataclasses import dataclass, field
from typing import Optional

from typing_extensions import Self

from core.domain.optimization.OptimizationState import OptimizationState
from core.domain.map.PostOffice import PostOffice
from core.domain.optimization.OptimizationType import OptimizationType
from core.types.Id import Id
from core.types.IntId import IntId


@dataclass
class Optimization:
    posta: IntId[PostOffice]
    title: str
    description: str
    dates: list[datetime.date]
    weight: int
    optimizationTime: datetime.timedelta
    createdAt: datetime.datetime
    authorizedByUserId: str
    state: OptimizationState
    type: OptimizationType
    parent: Optional[Id[Self]]
    useFrequency: bool = False
    useUnvisitedCrn: bool = True
    useDistrictCentrality: bool = True
    staticServiceTimes: Optional[int] = None
    stateChangedAt: datetime.datetime = field(default_factory=datetime.datetime.now)
    id: Id[Self] = Id.field()
11
admiral-worker/core/domain/optimization/OptimizationFiles.py
Normal file
@@ -0,0 +1,11 @@
from dataclasses import dataclass, field

from core.domain.map.RouteMatrix import RouteMatrix
from core.domain.optimization.OptimizationPoint import OptimizationPoint
from core.domain.optimization.TransportMode import TransportMode


@dataclass
class OptimizationFiles:
    optimizationPoints: list[OptimizationPoint] = field(default_factory=list)
    routeMatrices: dict[TransportMode, RouteMatrix] = field(default_factory=dict)
@@ -0,0 +1,18 @@
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

from core.domain.optimization.Optimization import Optimization
from core.types.Id import Id


@dataclass
class OptimizationMetrics:
    optimizationId: Id[Optimization]
    solution: int
    vehicles: int
    cost: float
    distance: float
    duration: float
    overlapping: Optional[dict[int, float]]
    createdAt: datetime
27
admiral-worker/core/domain/optimization/OptimizationPoint.py
Normal file
@@ -0,0 +1,27 @@
from dataclasses import dataclass
from datetime import timedelta

from core.domain.map.CrnPoint import CrnPoint
from core.domain.optimization.OptimizationPointType import OptimizationPointType


@dataclass
class OptimizationPoint:
    crnPoint: CrnPoint
    serviceTime: timedelta
    demand: int
    visitFrequency: float
    type: OptimizationPointType
    group = None

    @staticmethod
    def fromJson(**kwargs) -> 'OptimizationPoint':
        kwargs['serviceTime'] = timedelta(seconds=kwargs['serviceTime'])
        kwargs['crnPoint'] = CrnPoint.fromJson(**kwargs['crnPoint'])
        kwargs['type'] = OptimizationPointType(kwargs['type'])

        return OptimizationPoint(**kwargs)

    @property
    def isVisited(self):
        return self.visitFrequency > 0 and self.crnPoint.isVisited
@@ -0,0 +1,12 @@
from enum import auto

from core.types.AutoStrEnum import AutoStrEnum


class OptimizationPointType(AutoStrEnum):
    CRN = auto()
    POSTA = auto()
    DOSTAVNIK = auto()
    IZROCILNA_POSTA = auto()
    POSTNI_NABIRALNIK = auto()
    CRN_NEOBISKANA = auto()
@@ -0,0 +1,23 @@
import datetime
from dataclasses import dataclass, field
from typing import Optional

from typing_extensions import Self

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationPoint import OptimizationPoint
from core.domain.optimization.OptimizationRoute import OptimizationRoute
from core.types.Id import Id


@dataclass
class OptimizationResult:
    optimizationId: Id[Optimization]
    routes: list[OptimizationRoute]
    info: str
    authorizedByUserId: str
    parent: Optional[Id[Self]]
    createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp()))
    unvisited: list[OptimizationPoint] = field(default_factory=list)

    id: Id[Self] = Id.field()
@@ -0,0 +1,18 @@
import datetime
from dataclasses import dataclass, field

from typing_extensions import Self

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationResult import OptimizationResult
from core.domain.optimization.OptimizationState import OptimizationState
from core.domain.map.PostOffice import PostOffice
from core.domain.optimization.OptimizationType import OptimizationType
from core.types.Id import Id
from core.types.IntId import IntId


@dataclass
class OptimizationResultData:
    optimization: Optimization
    optimizationResult: OptimizationResult
43
admiral-worker/core/domain/optimization/OptimizationRoute.py
Normal file
@@ -0,0 +1,43 @@
from dataclasses import dataclass, field
from typing import Optional

from core import Utils
from core.Utils import initDataclass
from core.domain.map.GeoLocation import GeoLocation
from core.domain.optimization.OptimizationPoint import OptimizationPoint
from core.domain.optimization.OptimizationVehicle import OptimizationVehicle


@dataclass
class OptimizationRoute:
    name: str
    isExtra: bool
    index: int
    distance: float
    duration: int
    cost: float
    vehicle: OptimizationVehicle
    points: list[OptimizationPoint]
    steps: list[GeoLocation]
    hash: str = field(default_factory=str)

    averageDistance: Optional[float] = None
    averageDuration: Optional[int] = None

    @staticmethod
    def fromJson(**kwargs) -> 'OptimizationRoute':
        kwargs['vehicle'] = OptimizationVehicle(**kwargs['vehicle'])
        kwargs['points'] = [OptimizationPoint.fromJson(**point) for point in kwargs['points']]
        kwargs['steps'] = [GeoLocation(**stepDict) for stepDict in kwargs['steps']]

        return initDataclass(OptimizationRoute, **kwargs)

    def setHash(self):
        self.hash = self.__calculateHash()

    @property
    def isDirty(self):
        return self.__calculateHash() != self.hash

    def __calculateHash(self) -> str:
        return Utils.hash("".join([str(point.crnPoint.hisa) for point in self.points]))
@@ -0,0 +1,17 @@
from dataclasses import dataclass
from datetime import timedelta
from typing import Optional

from core.domain.optimization.OptimizationVehicle import OptimizationVehicle
from core.types.Id import Id


@dataclass
class OptimizationSolution:
    isExtra: bool
    optimizationVehicleId: Id[OptimizationVehicle]
    hise: list[int]
    distance: int
    duration: timedelta
    cost: int
    district: Optional[str] = None
13
admiral-worker/core/domain/optimization/OptimizationState.py
Normal file
@@ -0,0 +1,13 @@
from enum import auto

from core.types.AutoStrEnum import AutoStrEnum


class OptimizationState(AutoStrEnum):
    CREATED = auto()
    ACTIVE = auto()
    CANCELED = auto()
    COMPLETED = auto()
    FAILED = auto()
    CONFIRMED = auto()
    DELETED = auto()
10
admiral-worker/core/domain/optimization/OptimizationType.py
Normal file
@@ -0,0 +1,10 @@
from enum import auto

from core.types.AutoStrEnum import AutoStrEnum


class OptimizationType(AutoStrEnum):
    EXACT = auto()
    BUILDING_TYPE = auto()
    INITIAL = auto()
    TEST = auto()
@@ -0,0 +1,23 @@
from dataclasses import dataclass

from typing_extensions import Self

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.TransportMode import TransportMode
from core.types.Id import Id


@dataclass
class OptimizationVehicle:
    optimizationId: Id[Optimization]
    name: str
    type: TransportMode
    capacity: int
    range: float  # Kilometers
    minQuantity: int
    maxQuantity: int
    deliveryTime: float  # Hours
    averageSpeed: float
    maxSpeed: float
    districts: str
    id: Id[Self] = Id.field()
13
admiral-worker/core/domain/optimization/TransportMode.py
Normal file
@@ -0,0 +1,13 @@
from enum import auto

from core.types.AutoStrEnum import AutoStrEnum


class TransportMode(AutoStrEnum):
    BIKE = auto()
    CAR = auto()
    EV = auto()
    KM = auto()
    KPM = auto()
    MK = auto()
    WALK = auto()
19
admiral-worker/core/domain/worker/Worker.py
Normal file
@@ -0,0 +1,19 @@
from dataclasses import dataclass
from typing_extensions import Self

from core.domain.worker.WorkerState import WorkerState
from core.domain.worker.WorkerType import WorkerType
from core.types.Id import Id



@dataclass
class Worker:
    ip: str
    type: WorkerType
    state: WorkerState
    id: Id[Self] = Id.field()

    @property
    def name(self):
        return f"{self.type.value}-{self.ip}"
16
admiral-worker/core/domain/worker/WorkerJob.py
Normal file
@@ -0,0 +1,16 @@
from dataclasses import dataclass
from typing_extensions import Self

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationState import OptimizationState
from core.domain.worker.Worker import Worker
from core.types.Id import Id


@dataclass
class WorkerJob:
    workerId: Id[Worker]
    optimizationId: Id[Optimization]
    name: str
    state: OptimizationState
    id: Id[Self] = Id.field()
17
admiral-worker/core/domain/worker/WorkerJobStatus.py
Normal file
@@ -0,0 +1,17 @@
import datetime
from dataclasses import dataclass, field
from typing import Any
from typing_extensions import Self

from core.types.Id import Id


@dataclass
class WorkerJobStatus:
    ownerId: Id[Any]
    ramTaken: float
    cpuUtilization: float
    objective: int
    createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp()))

    id: Id[Self] = Id.field()
18
admiral-worker/core/domain/worker/WorkerLog.py
Normal file
@@ -0,0 +1,18 @@
import datetime
from dataclasses import dataclass, field
from typing import Any
from typing_extensions import Self

from core.domain.worker.WorkerLogLevel import WorkerLogLevel
from core.types.Id import Id


@dataclass
class WorkerLog:
    context: str
    data: str
    ownerId: Id[Any]
    level: WorkerLogLevel
    createdAt: float = field(default_factory=lambda: datetime.datetime.now().timestamp())

    id: Id[Self] = Id.field()
15
admiral-worker/core/domain/worker/WorkerLogLevel.py
Normal file
@@ -0,0 +1,15 @@
import datetime
from dataclasses import dataclass, field
from enum import auto
from typing import Any
from typing_extensions import Self

from core.types.AutoStrEnum import AutoStrEnum
from core.types.Id import Id


class WorkerLogLevel(AutoStrEnum):
    DEBUG = auto()
    INFO = auto()
    WARN = auto()
    ERROR = auto()
14
admiral-worker/core/domain/worker/WorkerState.py
Normal file
@@ -0,0 +1,14 @@
import datetime
from dataclasses import dataclass, field
from enum import auto
from typing import Any

from typing_extensions import Self

from core.types.AutoStrEnum import AutoStrEnum
from core.types.Id import Id


class WorkerState(AutoStrEnum):
    NORMAL = auto()
    DEPRECATED = auto()
17
admiral-worker/core/domain/worker/WorkerStatus.py
Normal file
@@ -0,0 +1,17 @@
import datetime
from dataclasses import dataclass, field
from typing import Any

from typing_extensions import Self

from core.types.Id import Id


@dataclass
class WorkerStatus:
    ownerId: Id[Any]
    ramAvailable: float
    cpuUtilization: float
    createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp()))

    id: Id[Self] = Id.field()
9
admiral-worker/core/domain/worker/WorkerType.py
Normal file
@@ -0,0 +1,9 @@
from enum import auto

from core.types.AutoStrEnum import AutoStrEnum


class WorkerType(AutoStrEnum):
    OPTIMIZER = auto()
    UPDATER = auto()
    GPS = auto()
31
admiral-worker/core/extend/fs.py
Normal file
@@ -0,0 +1,31 @@
import tempfile
from pathlib import Path

from core.Env import Env


def getPath(_file_, *path) -> Path:
    return Path(_file_).parent.joinpath(*path).resolve()


def getTempPath(*path) -> Path:
    return Path(tempfile.gettempdir()).joinpath(*path)


def getWorkerPath(*path) -> Path:
    return Path(Env.FTP_PATH, "..", "worker", *path).resolve()


def getUpdaterPath(*path) -> Path:
    return Path(Env.FTP_PATH, "..", "updater", *path).resolve()


def getFtpPath(*path) -> Path:
    return Path(Env.FTP_PATH, *path).resolve()

def getFtpPaths(*path, glob='**/*') -> list[Path]:
    p = Path(Env.FTP_PATH, *path).glob(glob)
    return [x for x in p if x.is_file()]

def getFtpGpsPath(*path) -> Path:
    return Path(Env.FTP_PATH, "gps", *path).resolve()
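All of these helpers resolve against `Env.FTP_PATH`; a hedged sketch of how they compose, assuming `Env.FTP_PATH` is configured and noting that the directory layout and file names below are made up:

```python
# Illustrative only: the FTP directory layout and file names are assumptions.
from core.extend import fs

gpsFile = fs.getFtpGpsPath("123_gps.csv")       # <FTP_PATH>/gps/123_gps.csv
csvFiles = fs.getFtpPaths("gps", glob="*.csv")  # CSV files directly under <FTP_PATH>/gps
workerDir = fs.getWorkerPath()                  # "worker" directory next to FTP_PATH
```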
86
admiral-worker/core/extend/logger.py
Normal file
@@ -0,0 +1,86 @@
import logging
import sys
import time
from logging.config import dictConfig
from typing import Sized

import urllib3

from core.Env import Env
from core.extend import fs

logFile = fs.getPath(__file__, f"../../logs/{Env.LOGGING_DIR}.log").resolve()

logFile.parent.mkdir(parents=True, exist_ok=True)

# WARNINGS
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# ERRORS ON WINDOWS CONSOLE
sys.stdin.reconfigure(encoding='utf-8')
if hasattr(sys.stdout, "reconfigure"):
    sys.stdout.reconfigure(encoding='utf-8')


logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'default': {
            'format': "%(asctime)s | %(processName)s | %(module)40s:%(lineno)-3d | %(levelname)-7s | %(message)s",
            'datefmt': "%Y.%m.%d %H:%M:%S",
        },
    },
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'default',
            'stream': sys.stderr,
        },
        'file': {
            'level': 'DEBUG',
            'class': "logging.handlers.RotatingFileHandler",
            'formatter': 'default',
            'encoding': 'UTF-8',
            "filename": logFile,
            "maxBytes": 1e9,
            "backupCount": 10,
        },
    },
    'loggers': {
        'app.services.PostaApiService': {
            'level': 'WARN'
        },
        'sqlalchemy.engine.Engine': {
            'level': 'WARN'
        },
        '': {
            'handlers': ['console', 'file'],
            'level': 'INFO',
            'propagate': True
        }
    }
})

log = logging.getLogger(__name__)


class Progress:
    def __init__(self, title: str, iter: Sized | int):
        self.title = title
        self.size = len(iter) if isinstance(iter, Sized) else iter
        self.startTime = time.time()
        self.nowTime = self.startTime

    def log(self, i: int, info: str = None) -> callable:
        nowTime = time.time()

        if nowTime - self.nowTime > 1:
            percent = round(i / self.size * 100, 2)
            secondsPerIter = (nowTime - self.startTime) / (i + 1)
            iterLeft = self.size - i
            secondsLeft = secondsPerIter * iterLeft
            minutesLeft = round(secondsLeft / 60, 2)
            log.info(f"{self.title if info is None else info}: {percent}% => {iterLeft} => {minutesLeft} minutes")
            self.nowTime = nowTime
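`Progress` throttles its output to at most one log line per second and estimates the remaining time from the average time per iteration so far. A minimal usage sketch (the loop body is a placeholder):

```python
# Illustrative only: track a long-running loop with Progress.
import time

from core.extend.logger import Progress

items = range(10_000)
progress = Progress("Importing points", items)
for i, item in enumerate(items):
    time.sleep(0.001)   # placeholder for real work
    progress.log(i)     # percent done, items left, estimated minutes left; at most once per second
```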
34
admiral-worker/core/repos/OptimizationMetricsRepo.py
Normal file
@@ -0,0 +1,34 @@
import json
import uuid
from abc import abstractmethod
from dataclasses import dataclass

from sqlalchemy import Engine, BLOB, PrimaryKeyConstraint
from sqlmodel import SQLModel, Field, Session, select
from typing_extensions import override, Self, Optional

from core import Utils
from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationMetrics import OptimizationMetrics
from core.repos.OptimizationResultRepo import OptimizationResultRepo
from core.types.Id import Id


@dataclass
class OptimizationMetricsRepo:

    @abstractmethod
    def getAll(self) -> list[OptimizationMetrics]:
        pass

    @abstractmethod
    def get(self, id: Id[OptimizationMetrics]) -> Optional[OptimizationMetrics]:
        pass

    @abstractmethod
    def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationMetrics]:
        pass

    @abstractmethod
    def post(self, optimizationMetrics: OptimizationMetrics) -> OptimizationMetrics:
        pass
33
admiral-worker/core/repos/OptimizationRepo.py
Normal file
@@ -0,0 +1,33 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationState import OptimizationState
from core.types.Id import Id
from core.types.IntId import IntId


class OptimizationRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[Optimization]:
        pass

    @abstractmethod
    def get(self, id: Id[Optimization]) -> Optional[Optimization]:
        pass

    @abstractmethod
    def getWithState(self, state: OptimizationState) -> list[Optimization]:
        pass

    @abstractmethod
    def updateFirst(self, fromState: OptimizationState, toState: OptimizationState) -> Optional[Optimization]:
        pass

    @abstractmethod
    def setState(self, id: Id[Optimization], toState: OptimizationState) -> Optional[Optimization]:
        pass

    @abstractmethod
    def getLatestConfirmedByPosta(self, posta: int) -> Optional[Optimization]:
        pass
32
admiral-worker/core/repos/OptimizationResultRepo.py
Normal file
@@ -0,0 +1,32 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationResult import OptimizationResult
from core.types.Id import Id


class OptimizationResultRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[OptimizationResult]:
        pass

    @abstractmethod
    def getAllIds(self) -> list[Id[OptimizationResult]]:
        pass

    @abstractmethod
    def get(self, id: Id[OptimizationResult]) -> Optional[OptimizationResult]:
        pass

    @abstractmethod
    def post(self, optimizationResult: OptimizationResult) -> OptimizationResult:
        pass

    @abstractmethod
    def getAllByOptimizationId(self, optimizationId) -> list[OptimizationResult]:
        pass

    @abstractmethod
    def getLatestByOptimizationId(self, optimizationId: Id[Optimization]) -> Optional[OptimizationResult]:
        pass
20
admiral-worker/core/repos/OptimizationVehicleRepo.py
Normal file
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationVehicle import OptimizationVehicle
from core.types.Id import Id


class OptimizationVehicleRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[OptimizationVehicle]:
        pass

    @abstractmethod
    def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationVehicle]:
        pass

    @abstractmethod
    def get(self, id: Id[OptimizationVehicle]) -> Optional[OptimizationVehicle]:
        pass
21
admiral-worker/core/repos/WorkerJobLogRepo.py
Normal file
@@ -0,0 +1,21 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.WorkerJob import WorkerJob
from core.domain.worker.WorkerLog import WorkerLog
from core.domain.worker.WorkerLogLevel import WorkerLogLevel
from core.types.Id import Id


class WorkerJobLogRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[WorkerLog]:
        pass

    @abstractmethod
    def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]:
        pass

    @abstractmethod
    def post(self, context: str, workerJobId: Id[WorkerJob], data: str, level: WorkerLogLevel) -> WorkerLog:
        pass
19
admiral-worker/core/repos/WorkerJobRepo.py
Normal file
@@ -0,0 +1,19 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.WorkerJob import WorkerJob
from core.types.Id import Id


class WorkerJobRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[WorkerJob]:
        pass

    @abstractmethod
    def get(self, id: Id[WorkerJob]) -> Optional[WorkerJob]:
        pass

    @abstractmethod
    def post(self, obj: WorkerJob) -> WorkerJob:
        pass
20
admiral-worker/core/repos/WorkerJobStatusRepo.py
Normal file
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.WorkerJob import WorkerJob
from core.domain.worker.WorkerJobStatus import WorkerJobStatus
from core.types.Id import Id


class WorkerJobStatusRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[WorkerJobStatus]:
        pass

    @abstractmethod
    def get(self, id: Id[WorkerJobStatus]) -> Optional[WorkerJobStatus]:
        pass

    @abstractmethod
    def post(self, workerJobId: Id[WorkerJob], ramTaken: float, cpuUtilization: float, objective: int) -> WorkerJobStatus:
        pass
21
admiral-worker/core/repos/WorkerLogRepo.py
Normal file
@@ -0,0 +1,21 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.Worker import Worker
from core.domain.worker.WorkerLog import WorkerLog
from core.domain.worker.WorkerLogLevel import WorkerLogLevel
from core.types.Id import Id


class WorkerLogRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[WorkerLog]:
        pass

    @abstractmethod
    def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]:
        pass

    @abstractmethod
    def post(self, context: str, workerId: Id[Worker], data: str, level: WorkerLogLevel) -> WorkerLog:
        pass
28
admiral-worker/core/repos/WorkerRepo.py
Normal file
@@ -0,0 +1,28 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.Worker import Worker
from core.domain.worker.WorkerType import WorkerType
from core.types.Id import Id


class WorkerRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[Worker]:
        pass

    @abstractmethod
    def get(self, id: Id[Worker]) -> Optional[Worker]:
        pass

    @abstractmethod
    def post(self, ip: str, type: WorkerType) -> Worker:
        pass

    @abstractmethod
    def getByIp(self, ip: str, type: WorkerType):
        pass

    @abstractmethod
    def deleteByIp(self, ip: str, type: WorkerType) -> int:
        pass
20
admiral-worker/core/repos/WorkerStatusRepo.py
Normal file
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod
from typing import Optional

from core.domain.worker.Worker import Worker
from core.domain.worker.WorkerStatus import WorkerStatus
from core.types.Id import Id


class WorkerStatusRepo(ABC):
    @abstractmethod
    def getAll(self) -> list[WorkerStatus]:
        pass

    @abstractmethod
    def get(self, id: Id[WorkerStatus]) -> Optional[WorkerStatus]:
        pass

    @abstractmethod
    def post(self, workerId: Id[Worker], ramAvailable: float, cpuUtilization: float) -> WorkerStatus:
        pass
28
admiral-worker/core/services/FtpService.py
Normal file
@@ -0,0 +1,28 @@
from abc import ABC, abstractmethod
from pathlib import Path


class FtpService(ABC):
    @abstractmethod
    def download(self, path: Path):
        pass

    @abstractmethod
    def upload(self, path: Path):
        pass

    @abstractmethod
    def rename(self, oldPath: Path, newPath: Path):
        pass

    @abstractmethod
    def delete(self, path: Path):
        pass

    @abstractmethod
    def copy(self, path: Path, newPath: Path):
        pass

    @abstractmethod
    def scan(self) -> list[Path]:
        pass
26
admiral-worker/core/services/OptimizationService.py
Normal file
@@ -0,0 +1,26 @@
from abc import ABC, abstractmethod
from typing import Callable, Optional

from core.domain.map.RouteMatrix import RouteMatrix
from core.domain.optimization.Optimization import Optimization
from core.domain.optimization.OptimizationPoint import OptimizationPoint
from core.domain.optimization.OptimizationResultData import OptimizationResultData
from core.domain.optimization.OptimizationSolution import OptimizationSolution
from core.domain.optimization.OptimizationVehicle import OptimizationVehicle
from core.domain.optimization.TransportMode import TransportMode
from core.types.Logger import Logger


class OptimizationService(ABC):
    @abstractmethod
    def vrpOptimization(
            self,
            optimization: Optimization,
            optimizationVehicles: list[OptimizationVehicle],
            optimizationPoints: list[OptimizationPoint],
            routeMatrices: dict[TransportMode, RouteMatrix],
            solutionCallback: Callable[[int, list[OptimizationSolution], bool, list[OptimizationPoint], Optional[dict[int, float]]], None],
            terminationCallback: Callable[[], bool],
            log: Logger,
            initialOptimizationResultData: Optional[OptimizationResultData]):
        pass
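`vrpOptimization` reports intermediate solutions through `solutionCallback` and polls `terminationCallback` to decide when to stop. A hedged sketch of a callback matching the declared signature; the parameter names and the handling inside are assumptions for illustration, not the repository's worker code:

```python
# Illustrative only: a function with the shape expected for solutionCallback.
from typing import Optional

from core.domain.optimization.OptimizationPoint import OptimizationPoint
from core.domain.optimization.OptimizationSolution import OptimizationSolution


def onSolution(
        objective: int,
        solutions: list[OptimizationSolution],
        isFinal: bool,
        unvisited: list[OptimizationPoint],
        overlapping: Optional[dict[int, float]]) -> None:
    # Log each reported solution; the meaning of the arguments is read off the
    # type hints above only, not off the actual implementation.
    print(f"objective={objective} routes={len(solutions)} unvisited={len(unvisited)} final={isFinal}")
```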
21
admiral-worker/core/services/RoutingService.py
Normal file
@@ -0,0 +1,21 @@
from abc import ABC, abstractmethod

from core.domain.map.GeoLocation import GeoLocation
from core.domain.map.RouteInfo import RouteInfo
from core.domain.map.RouteMatrix import RouteMatrix
from core.domain.optimization.TransportMode import TransportMode


class RoutingService(ABC):

    @abstractmethod
    def getRouteMatrix(self, geoLocations: list[GeoLocation], transportMode: TransportMode) -> RouteMatrix:
        pass

    @abstractmethod
    def getRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation]) -> RouteInfo:
        pass

    @abstractmethod
    def getAverageRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation], probability: list[float], iterations: int) -> RouteInfo:
        pass
41
admiral-worker/core/services/SystemService.py
Normal file
@@ -0,0 +1,41 @@
from abc import ABC, abstractmethod
from typing import Optional


class SystemService(ABC):

    @abstractmethod
    def getIp(self) -> str:
        pass

    @abstractmethod
    def getCpuUtilization(self) -> float:
        pass

    @abstractmethod
    def getRamMbAvailable(self) -> float:
        pass

    @abstractmethod
    def getMaxRamMbAvailable(self) -> float:
        pass

    @abstractmethod
    def getCpuAvailable(self) -> int:
        pass

    @abstractmethod
    def getProcessCpu(self, pid: int = None) -> Optional[float]:
        pass

    @abstractmethod
    def getProcessRam(self, pid: int = None) -> Optional[float]:
        pass

    @abstractmethod
    def killProcess(self, pid: int = None):
        pass

    @abstractmethod
    def terminateProcess(self, pid: int = None):
        pass
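The concrete `SystemService` implementation is not part of the shown diff; a minimal sketch of how the interface could be satisfied with `psutil` and `socket` (an assumption for illustration, not the project's actual service):

```python
# Illustrative only: one possible SystemService backed by psutil.
import socket
from typing import Optional

import psutil

from core.services.SystemService import SystemService


class PsutilSystemService(SystemService):
    def getIp(self) -> str:
        return socket.gethostbyname(socket.gethostname())

    def getCpuUtilization(self) -> float:
        return psutil.cpu_percent(interval=0.1)

    def getRamMbAvailable(self) -> float:
        return psutil.virtual_memory().available / 1e6

    def getMaxRamMbAvailable(self) -> float:
        return psutil.virtual_memory().total / 1e6

    def getCpuAvailable(self) -> int:
        return psutil.cpu_count()

    def getProcessCpu(self, pid: int = None) -> Optional[float]:
        return psutil.Process(pid).cpu_percent(interval=0.1)

    def getProcessRam(self, pid: int = None) -> Optional[float]:
        return psutil.Process(pid).memory_info().rss / 1e6

    def killProcess(self, pid: int = None):
        psutil.Process(pid).kill()

    def terminateProcess(self, pid: int = None):
        psutil.Process(pid).terminate()
```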
Some files were not shown because too many files have changed in this diff.