From 4af630a0f8d228a30753f291969f7219efad735d Mon Sep 17 00:00:00 2001 From: ilijat Date: Tue, 24 Jun 2025 14:22:50 +0200 Subject: [PATCH] Cleaning Posta POI worker --- .gitignore | 1 + admiral-router/.editorconfig | 8 + admiral-router/.gitignore | 3 + admiral-router/Dockerfile | 19 + admiral-router/INSTALL.sh | 26 + admiral-router/Makefile | 3 + admiral-router/README.md | 20 + admiral-router/docker-compose.yml | 82 ++ admiral-router/docs/vehicle_profiles.png | Bin 0 -> 64709 bytes admiral-router/vehicles/bike.lua | 682 +++++++++++++++++ admiral-router/vehicles/car.lua | 504 ++++++++++++ admiral-router/vehicles/ev.lua | 504 ++++++++++++ admiral-router/vehicles/km.lua | 683 +++++++++++++++++ admiral-router/vehicles/kpm.lua | 687 +++++++++++++++++ admiral-router/vehicles/lib/access.lua | 15 + admiral-router/vehicles/lib/destination.lua | 29 + admiral-router/vehicles/lib/guidance.lua | 173 +++++ admiral-router/vehicles/lib/maxspeed.lua | 19 + admiral-router/vehicles/lib/measure.lua | 107 +++ admiral-router/vehicles/lib/pprint.lua | 457 +++++++++++ .../vehicles/lib/profile_debugger.lua | 142 ++++ admiral-router/vehicles/lib/relations.lua | 261 +++++++ admiral-router/vehicles/lib/sequence.lua | 10 + admiral-router/vehicles/lib/set.lua | 23 + admiral-router/vehicles/lib/tags.lua | 131 ++++ .../vehicles/lib/traffic_signal.lua | 26 + admiral-router/vehicles/lib/utils.lua | 43 ++ admiral-router/vehicles/lib/way_handlers.lua | 717 ++++++++++++++++++ admiral-router/vehicles/mk.lua | 504 ++++++++++++ admiral-router/vehicles/walk.lua | 264 +++++++ admiral-worker/.editorconfig | 8 + admiral-worker/.gitignore | 25 + admiral-worker/Dockerfile | 5 + admiral-worker/Makefile | 23 + admiral-worker/app/App.py | 363 +++++++++ admiral-worker/app/Env.py | 22 + .../algorithms/OrToolsOptimizationService.py | 260 +++++++ admiral-worker/app/algorithms/solver_or.py | 554 ++++++++++++++ .../repos/sql/OptimizationMetricsSqlRepo.py | 71 ++ .../repos/sql/OptimizationResultSqlRepo.py | 117 +++ .../app/repos/sql/OptimizationSqlRepo.py | 139 ++++ .../repos/sql/OptimizationVehicleSqlRepo.py | 70 ++ .../app/repos/sql/WorkerJobLogSqlRepo.py | 73 ++ .../app/repos/sql/WorkerJobSqlRepo.py | 67 ++ .../app/repos/sql/WorkerJobStatusSqlRepo.py | 68 ++ .../app/repos/sql/WorkerLogSqlRepo.py | 72 ++ admiral-worker/app/repos/sql/WorkerSqlRepo.py | 83 ++ .../app/repos/sql/WorkerStatusSqlRepo.py | 69 ++ admiral-worker/app/repos/sql/__init__.py | 21 + admiral-worker/app/services/FsFtpService.py | 52 ++ .../app/services/FtputilFtpService.py | 77 ++ .../app/services/OsrmRoutingService.py | 131 ++++ .../app/services/PsutilSystemService.py | 59 ++ .../services/SolvesallOptimizationService.py | 550 ++++++++++++++ admiral-worker/buildSrc/common.mk | 39 + .../cli/run_optimizationResults_migrations.py | 58 ++ admiral-worker/cli/run_optimization_worker.py | 12 + admiral-worker/core/Env.py | 11 + admiral-worker/core/Utils.py | 125 +++ admiral-worker/core/Var.py | 46 ++ admiral-worker/core/domain/Delivery.py | 14 + admiral-worker/core/domain/GeoLocation.py | 26 + admiral-worker/core/domain/RouteInfo.py | 10 + .../core/domain/optimization/Optimization.py | 32 + .../domain/optimization/OptimizationFiles.py | 11 + .../optimization/OptimizationMetrics.py | 18 + .../domain/optimization/OptimizationPoint.py | 27 + .../optimization/OptimizationPointType.py | 12 + .../domain/optimization/OptimizationResult.py | 23 + .../optimization/OptimizationResultData.py | 18 + .../domain/optimization/OptimizationRoute.py | 43 ++ .../optimization/OptimizationSolution.py | 
17 + .../domain/optimization/OptimizationState.py | 13 + .../domain/optimization/OptimizationType.py | 10 + .../optimization/OptimizationVehicle.py | 23 + .../core/domain/optimization/TransportMode.py | 13 + admiral-worker/core/domain/worker/Worker.py | 19 + .../core/domain/worker/WorkerJob.py | 16 + .../core/domain/worker/WorkerJobStatus.py | 17 + .../core/domain/worker/WorkerLog.py | 18 + .../core/domain/worker/WorkerLogLevel.py | 15 + .../core/domain/worker/WorkerState.py | 14 + .../core/domain/worker/WorkerStatus.py | 17 + .../core/domain/worker/WorkerType.py | 9 + admiral-worker/core/extend/fs.py | 31 + admiral-worker/core/extend/logger.py | 86 +++ .../core/repos/OptimizationMetricsRepo.py | 34 + admiral-worker/core/repos/OptimizationRepo.py | 33 + .../core/repos/OptimizationResultRepo.py | 32 + .../core/repos/OptimizationVehicleRepo.py | 20 + admiral-worker/core/repos/WorkerJobLogRepo.py | 21 + admiral-worker/core/repos/WorkerJobRepo.py | 19 + .../core/repos/WorkerJobStatusRepo.py | 20 + admiral-worker/core/repos/WorkerLogRepo.py | 21 + admiral-worker/core/repos/WorkerRepo.py | 28 + admiral-worker/core/repos/WorkerStatusRepo.py | 20 + admiral-worker/core/services/FtpService.py | 28 + .../core/services/OptimizationService.py | 26 + .../core/services/RoutingService.py | 21 + admiral-worker/core/services/SystemService.py | 41 + admiral-worker/core/types/AutoStrEnum.py | 14 + admiral-worker/core/types/Id.py | 35 + admiral-worker/core/types/Logger.py | 15 + .../core/usecases/Run_optimization_worker.py | 123 +++ .../initialization/Register_worker.py | 46 ++ .../usecases/logging/Log_worker_job_status.py | 22 + .../usecases/logging/Log_worker_status.py | 33 + .../Run_optimization_job.py | 356 +++++++++ admiral-worker/docker-compose.yaml | 15 + admiral-worker/requirements.txt | 28 + admiral-worker/tests/__init__.py | 0 admiral-worker/tests/test_core/__init__.py | 0 admiral-worker/tests/test_core/test_fs.py | 10 + .../test_e2e/test_routingEngine/test_main.py | 56 ++ admiral-worker/tests/test_repos/__init__.py | 0 .../test_OptimizationPostmanRepo.py | 17 + .../tests/test_repos/test_OptimizationRepo.py | 35 + .../test_repos/test_OptimizationResultRepo.py | 15 + .../test_OptimizationVehicleRepo.py | 18 + .../tests/test_repos/test_WorkerJobLogRepo.py | 17 + .../tests/test_repos/test_WorkerJobRepo.py | 17 + .../test_repos/test_WorkerJobStatusRepo.py | 17 + .../tests/test_repos/test_WorkerLogRepo.py | 17 + .../tests/test_repos/test_WorkerRepo.py | 17 + .../tests/test_repos/test_WorkerStatusRepo.py | 17 + admiral-worker/tests/test_services/.gitkeep | 1 + .../tests/test_services/__init__.py | 0 .../tests/test_services/test_FtpService.py | 18 + .../test_services/test_OptimizationService.py | 108 +++ .../test_services/test_RoutingService.py | 113 +++ .../tests/test_services/test_SystemService.py | 25 + .../tests/test_usecases/__init__.py | 0 .../test_usecases/test_Run_updating_worker.py | 13 + .../test_initialization/__init__.py | 0 .../test_Register_worker.py | 23 + .../tests/test_usecases/test_jobs/__init__.py | 0 .../test_Run_worker_optimization_job.py | 14 + .../tests/test_usecases/test_logs/__init__.py | 0 .../test_logs/test_Log_worker_status.py | 27 + 139 files changed, 11611 insertions(+) create mode 100644 .gitignore create mode 100644 admiral-router/.editorconfig create mode 100644 admiral-router/.gitignore create mode 100644 admiral-router/Dockerfile create mode 100644 admiral-router/INSTALL.sh create mode 100644 admiral-router/Makefile create mode 100644 admiral-router/README.md create mode 
100644 admiral-router/docker-compose.yml create mode 100644 admiral-router/docs/vehicle_profiles.png create mode 100644 admiral-router/vehicles/bike.lua create mode 100644 admiral-router/vehicles/car.lua create mode 100644 admiral-router/vehicles/ev.lua create mode 100644 admiral-router/vehicles/km.lua create mode 100644 admiral-router/vehicles/kpm.lua create mode 100644 admiral-router/vehicles/lib/access.lua create mode 100644 admiral-router/vehicles/lib/destination.lua create mode 100644 admiral-router/vehicles/lib/guidance.lua create mode 100644 admiral-router/vehicles/lib/maxspeed.lua create mode 100644 admiral-router/vehicles/lib/measure.lua create mode 100644 admiral-router/vehicles/lib/pprint.lua create mode 100644 admiral-router/vehicles/lib/profile_debugger.lua create mode 100644 admiral-router/vehicles/lib/relations.lua create mode 100644 admiral-router/vehicles/lib/sequence.lua create mode 100644 admiral-router/vehicles/lib/set.lua create mode 100644 admiral-router/vehicles/lib/tags.lua create mode 100644 admiral-router/vehicles/lib/traffic_signal.lua create mode 100644 admiral-router/vehicles/lib/utils.lua create mode 100644 admiral-router/vehicles/lib/way_handlers.lua create mode 100644 admiral-router/vehicles/mk.lua create mode 100644 admiral-router/vehicles/walk.lua create mode 100644 admiral-worker/.editorconfig create mode 100644 admiral-worker/.gitignore create mode 100644 admiral-worker/Dockerfile create mode 100644 admiral-worker/Makefile create mode 100644 admiral-worker/app/App.py create mode 100644 admiral-worker/app/Env.py create mode 100644 admiral-worker/app/algorithms/OrToolsOptimizationService.py create mode 100644 admiral-worker/app/algorithms/solver_or.py create mode 100644 admiral-worker/app/repos/sql/OptimizationMetricsSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/OptimizationResultSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/OptimizationSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/OptimizationVehicleSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerJobLogSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerJobSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerJobStatusSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerLogSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/WorkerStatusSqlRepo.py create mode 100644 admiral-worker/app/repos/sql/__init__.py create mode 100644 admiral-worker/app/services/FsFtpService.py create mode 100644 admiral-worker/app/services/FtputilFtpService.py create mode 100644 admiral-worker/app/services/OsrmRoutingService.py create mode 100644 admiral-worker/app/services/PsutilSystemService.py create mode 100644 admiral-worker/app/services/SolvesallOptimizationService.py create mode 100644 admiral-worker/buildSrc/common.mk create mode 100644 admiral-worker/cli/run_optimizationResults_migrations.py create mode 100644 admiral-worker/cli/run_optimization_worker.py create mode 100644 admiral-worker/core/Env.py create mode 100644 admiral-worker/core/Utils.py create mode 100644 admiral-worker/core/Var.py create mode 100644 admiral-worker/core/domain/Delivery.py create mode 100644 admiral-worker/core/domain/GeoLocation.py create mode 100644 admiral-worker/core/domain/RouteInfo.py create mode 100644 admiral-worker/core/domain/optimization/Optimization.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationFiles.py create mode 100644 
admiral-worker/core/domain/optimization/OptimizationMetrics.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationPoint.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationPointType.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationResult.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationResultData.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationRoute.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationSolution.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationState.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationType.py create mode 100644 admiral-worker/core/domain/optimization/OptimizationVehicle.py create mode 100644 admiral-worker/core/domain/optimization/TransportMode.py create mode 100644 admiral-worker/core/domain/worker/Worker.py create mode 100644 admiral-worker/core/domain/worker/WorkerJob.py create mode 100644 admiral-worker/core/domain/worker/WorkerJobStatus.py create mode 100644 admiral-worker/core/domain/worker/WorkerLog.py create mode 100644 admiral-worker/core/domain/worker/WorkerLogLevel.py create mode 100644 admiral-worker/core/domain/worker/WorkerState.py create mode 100644 admiral-worker/core/domain/worker/WorkerStatus.py create mode 100644 admiral-worker/core/domain/worker/WorkerType.py create mode 100644 admiral-worker/core/extend/fs.py create mode 100644 admiral-worker/core/extend/logger.py create mode 100644 admiral-worker/core/repos/OptimizationMetricsRepo.py create mode 100644 admiral-worker/core/repos/OptimizationRepo.py create mode 100644 admiral-worker/core/repos/OptimizationResultRepo.py create mode 100644 admiral-worker/core/repos/OptimizationVehicleRepo.py create mode 100644 admiral-worker/core/repos/WorkerJobLogRepo.py create mode 100644 admiral-worker/core/repos/WorkerJobRepo.py create mode 100644 admiral-worker/core/repos/WorkerJobStatusRepo.py create mode 100644 admiral-worker/core/repos/WorkerLogRepo.py create mode 100644 admiral-worker/core/repos/WorkerRepo.py create mode 100644 admiral-worker/core/repos/WorkerStatusRepo.py create mode 100644 admiral-worker/core/services/FtpService.py create mode 100644 admiral-worker/core/services/OptimizationService.py create mode 100644 admiral-worker/core/services/RoutingService.py create mode 100644 admiral-worker/core/services/SystemService.py create mode 100644 admiral-worker/core/types/AutoStrEnum.py create mode 100644 admiral-worker/core/types/Id.py create mode 100644 admiral-worker/core/types/Logger.py create mode 100644 admiral-worker/core/usecases/Run_optimization_worker.py create mode 100644 admiral-worker/core/usecases/initialization/Register_worker.py create mode 100644 admiral-worker/core/usecases/logging/Log_worker_job_status.py create mode 100644 admiral-worker/core/usecases/logging/Log_worker_status.py create mode 100644 admiral-worker/core/usecases/optimization_worker/Run_optimization_job.py create mode 100644 admiral-worker/docker-compose.yaml create mode 100644 admiral-worker/requirements.txt create mode 100644 admiral-worker/tests/__init__.py create mode 100644 admiral-worker/tests/test_core/__init__.py create mode 100644 admiral-worker/tests/test_core/test_fs.py create mode 100644 admiral-worker/tests/test_e2e/test_routingEngine/test_main.py create mode 100644 admiral-worker/tests/test_repos/__init__.py create mode 100644 admiral-worker/tests/test_repos/test_OptimizationPostmanRepo.py create mode 100644 
admiral-worker/tests/test_repos/test_OptimizationRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_OptimizationResultRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_OptimizationVehicleRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerJobLogRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerJobRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerJobStatusRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerLogRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerRepo.py
 create mode 100644 admiral-worker/tests/test_repos/test_WorkerStatusRepo.py
 create mode 100644 admiral-worker/tests/test_services/.gitkeep
 create mode 100644 admiral-worker/tests/test_services/__init__.py
 create mode 100644 admiral-worker/tests/test_services/test_FtpService.py
 create mode 100644 admiral-worker/tests/test_services/test_OptimizationService.py
 create mode 100644 admiral-worker/tests/test_services/test_RoutingService.py
 create mode 100644 admiral-worker/tests/test_services/test_SystemService.py
 create mode 100644 admiral-worker/tests/test_usecases/__init__.py
 create mode 100644 admiral-worker/tests/test_usecases/test_Run_updating_worker.py
 create mode 100644 admiral-worker/tests/test_usecases/test_initialization/__init__.py
 create mode 100644 admiral-worker/tests/test_usecases/test_initialization/test_Register_worker.py
 create mode 100644 admiral-worker/tests/test_usecases/test_jobs/__init__.py
 create mode 100644 admiral-worker/tests/test_usecases/test_jobs/test_Run_worker_optimization_job.py
 create mode 100644 admiral-worker/tests/test_usecases/test_logs/__init__.py
 create mode 100644 admiral-worker/tests/test_usecases/test_logs/test_Log_worker_status.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..723ef36
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.idea
\ No newline at end of file
diff --git a/admiral-router/.editorconfig b/admiral-router/.editorconfig
new file mode 100644
index 0000000..91fea9b
--- /dev/null
+++ b/admiral-router/.editorconfig
@@ -0,0 +1,8 @@
+root = true
+
+[*]
+indent_style = tab
+insert_final_newline = true
+max_line_length = 150
+tab_width = 4
+trim_trailing_whitespace = true
diff --git a/admiral-router/.gitignore b/admiral-router/.gitignore
new file mode 100644
index 0000000..da090fb
--- /dev/null
+++ b/admiral-router/.gitignore
@@ -0,0 +1,3 @@
+.idea
+maps/*
+data
diff --git a/admiral-router/Dockerfile b/admiral-router/Dockerfile
new file mode 100644
index 0000000..ec4c62e
--- /dev/null
+++ b/admiral-router/Dockerfile
@@ -0,0 +1,19 @@
+FROM ghcr.io/project-osrm/osrm-backend:v5.27.1
+
+ARG TYPE
+ARG MAP
+
+ENV MAP=${MAP}
+ENV TYPE=${TYPE}
+
+COPY maps/${MAP}.osm.pbf /data/${MAP}.osm.pbf
+COPY vehicles/${TYPE}.lua /data/${TYPE}.lua
+
+RUN osrm-extract -p /data/${TYPE}.lua /data/${MAP}.osm.pbf && \
+	osrm-partition /data/${MAP}.osrm && \
+	osrm-customize /data/${MAP}.osrm && \
+	rm -f /data/${MAP}.osm.pbf
+
+CMD osrm-routed --algorithm mld --max-table-size=1000000000 --max-viaroute-size=1000000000 --max-trip-size=1000000000 /data/${MAP}.osrm
+
+EXPOSE 5000
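The Dockerfile above pre-processes the map at build time (osrm-extract, osrm-partition, osrm-customize) and then serves it with osrm-routed on the MLD pipeline, port 5000. A minimal smoke test against a container built from it might look like the sketch below; it assumes the container is running locally with port 5000 published, and the coordinates are arbitrary points in Slovenia rather than values taken from this patch. The profile segment of the URL ("driving" here) is only a path placeholder in osrm-backend; the actual behaviour comes from the Lua profile the data was extracted with.

```bash
# Route between two illustrative points (lon,lat pairs separated by ';').
curl -s "http://localhost:5000/route/v1/driving/14.5058,46.0569;14.5215,46.0511?overview=false"

# Duration/distance matrix for three points; the raised --max-table-size in CMD
# is presumably what allows the large matrices needed by the optimization worker.
curl -s "http://localhost:5000/table/v1/driving/14.5058,46.0569;14.5215,46.0511;14.4834,46.0662?annotations=duration,distance"
```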
test -f "../maps/${country}-latest.osm.pbf"; then + curl -kLSs "http://download.geofabrik.de/europe/${country}-latest.osm.pbf" -o "../maps/${country}-latest.osm.pbf" +fi + +for profile in bike car ev km kpm mk walk ; +do + mkdir -p $profile + cp "../maps/${country}-latest.osm.pbf" "${profile}/${country}-${profile}.osm.pbf" + cd $profile + osrm-extract -p "../../vehicles/${profile}.lua" "${country}-${profile}.osm.pbf" + osrm-partition "${country}-${profile}.osrm" + osrm-customize "${country}-${profile}.osrm" + echo "osrm-routed --port=${port} --algorithm mld --max-table-size=1000000000 --max-viaroute-size=1000000000 --max-trip-size=1000000000 ${country}-${profile}.osrm" >> RUN.sh + (( port++ )) + cd .. +done + +read -p "Press enter to continue" +exit 0 diff --git a/admiral-router/Makefile b/admiral-router/Makefile new file mode 100644 index 0000000..c1d2598 --- /dev/null +++ b/admiral-router/Makefile @@ -0,0 +1,3 @@ + +init: + wget http://download.geofabrik.de/europe/slovenia-latest.osm.pbf -P ./maps diff --git a/admiral-router/README.md b/admiral-router/README.md new file mode 100644 index 0000000..85f5f61 --- /dev/null +++ b/admiral-router/README.md @@ -0,0 +1,20 @@ +# osrm-backend-nginx + +* bike -> bicycle +* car -> regular car +* ev -> electric vehicle (tricikel, štirikolesnik) +* km -> kolo z motorjem +* kpm -> kolo z pomoznim motorjem +* mk -> motor kolo +* walk -> hoja + +# Run on dev +```bash +# First time and when you want to update the map +make +# First time and when map or .lua files are changed +docker compose build + +#run +docker compose up +``` diff --git a/admiral-router/docker-compose.yml b/admiral-router/docker-compose.yml new file mode 100644 index 0000000..ab1e3c7 --- /dev/null +++ b/admiral-router/docker-compose.yml @@ -0,0 +1,82 @@ +#We creating a container for each api endpoint and connect nginx to endpoints to creating a single api call for our endpoints +#ARGS: +# TYPE=(car,foot,bicycle) +# MAP(default)="iran-latest" + +services: + bike: + build: + context: . + args: + TYPE: bike + MAP: slovenia-latest + ports: + - 5000:5000 + container_name: osrm-backend_bike + restart: always + + car: + build: + context: . + args: + TYPE: car + MAP: slovenia-latest + ports: + - 5001:5000 + container_name: osrm-backend_car + restart: always + + ev: + build: + context: . + args: + TYPE: ev + MAP: slovenia-latest + ports: + - 5002:5000 + container_name: osrm-backend_ev + restart: always + + km: + build: + context: . + args: + TYPE: km + MAP: slovenia-latest + ports: + - 5003:5000 + container_name: osrm-backend_km + restart: always + + kpm: + build: + context: . + args: + TYPE: kpm + MAP: slovenia-latest + ports: + - 5004:5000 + container_name: osrm-backend_kpm + restart: always + + mk: + build: + context: . + args: + TYPE: mk + MAP: slovenia-latest + ports: + - 5005:5000 + container_name: osrm-backend_mk + restart: always + + walk: + build: + context: . 
diff --git a/admiral-router/docker-compose.yml b/admiral-router/docker-compose.yml
new file mode 100644
index 0000000..ab1e3c7
--- /dev/null
+++ b/admiral-router/docker-compose.yml
@@ -0,0 +1,82 @@
+# We create one container per vehicle profile; a reverse proxy (e.g. nginx) can expose them behind a single API endpoint.
+# Build ARGS:
+#   TYPE = vehicle profile (bike, car, ev, km, kpm, mk, walk)
+#   MAP  = map name, here "slovenia-latest"
+
+services:
+  bike:
+    build:
+      context: .
+      args:
+        TYPE: bike
+        MAP: slovenia-latest
+    ports:
+      - 5000:5000
+    container_name: osrm-backend_bike
+    restart: always
+
+  car:
+    build:
+      context: .
+      args:
+        TYPE: car
+        MAP: slovenia-latest
+    ports:
+      - 5001:5000
+    container_name: osrm-backend_car
+    restart: always
+
+  ev:
+    build:
+      context: .
+      args:
+        TYPE: ev
+        MAP: slovenia-latest
+    ports:
+      - 5002:5000
+    container_name: osrm-backend_ev
+    restart: always
+
+  km:
+    build:
+      context: .
+      args:
+        TYPE: km
+        MAP: slovenia-latest
+    ports:
+      - 5003:5000
+    container_name: osrm-backend_km
+    restart: always
+
+  kpm:
+    build:
+      context: .
+      args:
+        TYPE: kpm
+        MAP: slovenia-latest
+    ports:
+      - 5004:5000
+    container_name: osrm-backend_kpm
+    restart: always
+
+  mk:
+    build:
+      context: .
+      args:
+        TYPE: mk
+        MAP: slovenia-latest
+    ports:
+      - 5005:5000
+    container_name: osrm-backend_mk
+    restart: always
+
+  walk:
+    build:
+      context: .
+      args:
+        TYPE: walk
+        MAP: slovenia-latest
+    container_name: osrm-backend_walk
+    ports:
+      - 5006:5000
+    restart: always
diff --git a/admiral-router/docs/vehicle_profiles.png b/admiral-router/docs/vehicle_profiles.png
new file mode 100644
index 0000000000000000000000000000000000000000..fdbd24ae75334904622ccf88dda8dbdaf244138e
GIT binary patch
literal 64709
[base85-encoded binary data for docs/vehicle_profiles.png (64709 bytes) omitted]
z1&!;J{yr`%tBixP$QZ4y+BYU~n_h(|#MoLUdwOmB>8+pK=BFq^W>uB0^vro z!G0Dmwe5d@I~^Zhg-;l!W{i)SY)?i6r8v3wEPE;Bf1|ItWDGiHf$6>EYikgP- zz-GLW&qs9BGFLtCvBEOBa;RwbzVMlZ^ z*3b_e@w*to!8@3*oT9@k3wnMZr+gwzI$w6!_ne_5#8oEb>VC*!Cjyy<^#+Q_-GYr2 zXiW^syznmz{P`hHa9I#jDFhD9lzi%6o{#JsG7WqJvu4lN)i%6YRm)V`j~&1qz$ zr+p=X<1hGLIWu10CUo(*^`@>K4^6rE^ZW|b-_Yd$rSL`4H5xGgaZ3wrS&+r zaK)4q-KxqJ&#zxOkG9DQ4P}=#R#vHgUub-f&%K8J(fE3)l|UNiHZFDi>{VsvehD`z zH*@#G{OZ&e`VLFS9;;nYIAajveDtd2(inPUfhThQ^SD3aW5JeEP6mpS6jTQyBe7tfN;pr*G}9|e$whaQAL(O7w;>`qP>EK|g#Yrxi}(Xg?;hlyFod|%GzBY6{@2p0&Rm~g#>a&iS) z8>rpgrTGWV0a!Gzei4>x(M%O4OGW`$KE|#~J@vigKJ1OLXHL|R_VmPJ&1W?+dqF}+ zCiU!~o~l+jb;&<*#6;lU7%Ue9kSm0BSq%W`3{dCwTMAQ9#7*z__iPM5a+sbZ|LCwP zeA)MO5sj55^u^uxgE&?*#{f6slRg^;4z%BoOy^(A1ksAZNB(yGe0FwiZxX*Bob%rl zq`7t#);$bA>ckS3YP|_gXqB}^bnW)?(yQ^T19F?D?oR<#qw9;1!;Di_SvrLroD{B0 z+f0pSdj^|c)-g0^%%W0G@4!iT0!n2WFdywH zttRcag@dEfKQvUV-kW{eDTACF6#-UKc@*u(RKfWgN!Efl+_%BLRm*Fga}75d#<}3w zqv8^8o?lyogT8sycb}7?H0Y`roDJ-#f(XSe5f3gQ93K0N2eLUjt^cZfR;FNvm4VMi z?IJfZLiI_(udB<~IL~p^T-&X&J|udYcCUr*tI%nBeF*>)J+kuraciCP))P*M$;URd ze@+#WQv_k^+K94|*TE3&px-%e-kM*@E2uO1c_7Q))`4wc>-wne*kQu$Z94IVxH^iR zivi0=xAH*F0y>;KvJDTc2b&?al?rKl#+D?zX`p{Jd*~rfq>R0^_O75bCA!$jS8qiNtey>ZRj4jPU9T3w-ZNAkfnFDsbOE|}&Wwb~l>-$frB*PTMpMku_9_ux10@>`nq1+{qqy+uO5e_ey^b@3Il6h@*zGn_>?M0)+Yp zYdCv1i1PjsoOuKSzX#gpkJ(QSh8*_PdU{9C8zkWP4Bfd#hk)B41_QpHr|!osc&l*` z7G(Y)mIUGx!&}EBK${Blt`!miIC9fABvbgeM=QSGrV!m-Fmu{3DK06b+Luh@b2`Gp z>=Ig)gHGVRfd3^0VOPAxWY3$(&wAqO>JBWY`}{}*4BfP?_hqyIhWTbWG=2;~H z)eYDv6+k->ojd(ZzW|b*mD*$t>*bl<12^w#EkpuQwMS4myF(!CX2)1HX{x0?^xi}` z7&xGZlw3WlAAnyq;8~2wQK+4I1C!xMasWnll>u)zma<0Z z$-o@}8-?YxzBgZfJJHWuCFy#E3?c?vxX9qlwr9VFNh|CIG^8*jf_GABuRwr`4ncz~ z3`?1ODkgy~)GY308*Cic%o%4gW&xdNQ?vZ)0n_#EmW9NNj@oe@wy<=7_E|x4 z^v*>GRyBhb8IvKn`-{bNtl%Ak=ToD%&CLn?r_rjItphpcDjw1o5{WyFT!Nggxtc-$oml&eZ9zFQV4c#U5LVHBYALC zIrBteU1kG;^Gl*%PMb{a74e7PZg*x?O(KpoNfzG6mPVKEm%wXf5L~6~T<`#P$xWAY zqQKKn%w}{HA)Z|lnd-#xJfQ=v6_*AV9&~rib9bosd#1c)0$SWp`M$I#H};= z9=_lBK3F3u=xJu_dO_B$^DIpSVNusBZuV>2@9=^wX!}7y= zc2tZjLHquNB6@jk^hVML&=t3Je!FD0sF9rVB3^Ff`kXgb$O}Tt5(*Rter@-g?x}$8 zAq7yw019)vOGgw=9IYZlitbo27uowrJ8Za#qvAEi_|{NGd7rx*gfQwwabufEzTZzZ zU{rLzA=AF!2H3h$u?VlVDL`pj-)Qd7xyyzojiKL~oo{WZIg872_+Ki}N56I5c~^eg z<$kwh{ol|rLwRW#W9MzersUJINzb;W$Jm2SuGhOy`MFEH&AjLJBRtHSzd)^EV)sh? 
zo?EM;yK|7Up@2Y5{sU0YxR3;ky1P?Z4bj%C-}L8u#~gFdw5uut`fsA7cQ@-nAZyU( ztMY=pxGG-f-DDWQnBmm*0~3RPWS%pArP~!&?HwJEX+IUeQgDTX1(ZS#TH4lOQR09b z!ajxyce2oHs*80x8eu#Yw{jMlCuGF_5pO2{ZcvE-Ct4ubd2}6Y4o*IO{ zD_iRl`S96e0xS({9G~WWjuxJO{k0G~BQNe4Zn`d%eI)yynzQHy#BOwNeT4V-Rq$qX zfB)_!r_km2Ao*PTwx9An2SEK26Q@w=XM0|ioEK`DR`lG*a|Y@y;FPu4L1lDhtiwxaK$ra5-}c16HLOPeP38LTqlx=mZl;+5=l9;ae`{PN n6)uFga*qGRHURif?S6(ZT)3&Z)!hFI1AfUoQIIGQGkE)7>~?D` literal 0 HcmV?d00001 diff --git a/admiral-router/vehicles/bike.lua b/admiral-router/vehicles/bike.lua new file mode 100644 index 0000000..b0c7482 --- /dev/null +++ b/admiral-router/vehicles/bike.lua @@ -0,0 +1,682 @@ +-- Bicycle profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Measure = require("lib/measure") + +function setup() + local max_speed = 30 + local default_speed = 11 + local walking_speed = 5 + + return { + properties = { + u_turn_penalty = 20, + traffic_light_penalty = 2, + --weight_name = 'cyclability', + weight_name = 'duration', + process_call_tagless_node = false, + max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s + use_turn_restrictions = false, + continue_straight_at_waypoint = false, + mode_change_penalty = 30, + }, + + default_mode = mode.cycling, + default_speed = default_speed, + walking_speed = walking_speed, + oneway_handling = true, + turn_penalty = 6, + turn_bias = 1.4, + use_public_transport = true, + + allowed_start_modes = Set { + mode.cycling, + mode.pushing_bike + }, + + barrier_blacklist = Set { + 'yes', + 'wall', + 'fence' + }, + + access_tag_whitelist = Set { + 'yes', + 'permissive', + 'designated' + }, + + access_tag_blacklist = Set { + 'no', + -- When a way is tagged with `use_sidepath` a parallel way suitable for + -- cyclists is mapped and must be used instead (by law). This tag is + -- used on ways that normally may be used by cyclists, but not when + -- a signposted parallel cycleway is available. For purposes of routing + -- cyclists, this value should be treated as 'no access for bicycles'. 
+ 'use_sidepath' + }, + + restricted_access_tag_list = Set { }, + + restricted_highway_whitelist = Set { }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + access_tags_hierarchy = Sequence { + 'bicycle', + 'vehicle', + 'access' + }, + + restrictions = Set { + 'bicycle' + }, + + cycleway_tags = Set { + 'track', + 'lane', + 'share_busway', + 'sharrow', + 'shared', + 'shared_lane' + }, + + opposite_cycleway_tags = Set { + 'opposite', + 'opposite_lane', + 'opposite_track', + }, + + -- reduce the driving speed by 30% for unsafe roads + -- only used for cyclability metric + unsafe_highway_list = { + primary = 0.5, + secondary = 0.65, + tertiary = 0.8, + primary_link = 0.5, + secondary_link = 0.65, + tertiary_link = 0.8, + }, + + service_penalties = { + alley = 0.5, + }, + + bicycle_speeds = { + cycleway = default_speed, + primary = default_speed, + primary_link = default_speed, + secondary = default_speed, + secondary_link = default_speed, + tertiary = default_speed, + tertiary_link = default_speed, + residential = default_speed, + unclassified = default_speed, + living_street = default_speed, + road = default_speed, + service = default_speed, + track = default_speed, + path = default_speed + }, + + pedestrian_speeds = { + footway = walking_speed, + pedestrian = walking_speed, + steps = 2 + }, + + railway_speeds = { + train = 10, + railway = 10, + subway = 10, + light_rail = 10, + monorail = 10, + tram = 10 + }, + + platform_speeds = { + platform = walking_speed + }, + + amenity_speeds = { + parking = default_speed, + parking_entrance = default_speed + }, + + man_made_speeds = { + pier = walking_speed + }, + + route_speeds = { + ferry = 5 + }, + + bridge_speeds = { + movable = 5 + }, + + surface_speeds = { + asphalt = default_speed, + ["cobblestone:flattened"] = 10, + paving_stones = 10, + compacted = 10, + cobblestone = 6, + unpaved = 6, + fine_gravel = 6, + gravel = 6, + pebblestone = 6, + ground = 6, + dirt = 6, + earth = 6, + grass = 6, + mud = 3, + sand = 3, + sett = 10 + }, + + classes = Sequence { + 'ferry', 'tunnel' + }, + + -- Which classes should be excludable + -- This increases memory usage so its disabled by default. 
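-- Illustrative aside (editor's sketch, not part of the original profile):
-- enabling the commented-out entry below, e.g.
--   excludable = Sequence { Set {'ferry'} },
-- should let OSRM honour an `exclude=ferry` query parameter at request time,
-- at the cost of the extra memory mentioned above.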
+ excludable = Sequence { +-- Set {'ferry'} + }, + + tracktype_speeds = { + }, + + smoothness_speeds = { + }, + + avoid = Set { + 'impassable', + 'construction' + } + } +end + +function process_node(profile, node, result) + -- parse access and barrier tags + local highway = node:get_value_by_key("highway") + local is_crossing = highway and highway == "crossing" + + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access and access ~= "" then + -- access restrictions on crossing nodes are not relevant for + -- the traffic on the road + if profile.access_tag_blacklist[access] and not is_crossing then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier and "" ~= barrier then + if profile.barrier_blacklist[barrier] then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if tag and "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function handle_bicycle_tags(profile,way,result,data) + -- initial routability check, filters out buildings, boundaries, etc + data.route = way:get_value_by_key("route") + data.man_made = way:get_value_by_key("man_made") + data.railway = way:get_value_by_key("railway") + data.amenity = way:get_value_by_key("amenity") + data.public_transport = way:get_value_by_key("public_transport") + data.bridge = way:get_value_by_key("bridge") + + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') and + (not profile.use_public_transport or not data.railway or data.railway=='') and + (not data.amenity or data.amenity=='') and + (not data.man_made or data.man_made=='') and + (not data.public_transport or data.public_transport=='') and + (not data.bridge or data.bridge=='') + then + return false + end + + -- access + data.access = find_access_tag(way, profile.access_tags_hierarchy) + if data.access and profile.access_tag_blacklist[data.access] then + return false + end + + -- other tags + data.junction = way:get_value_by_key("junction") + data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0 + data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0 + data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0 + data.barrier = way:get_value_by_key("barrier") + data.oneway = way:get_value_by_key("oneway") + data.oneway_bicycle = way:get_value_by_key("oneway:bicycle") + data.cycleway = way:get_value_by_key("cycleway") + data.cycleway_left = way:get_value_by_key("cycleway:left") + data.cycleway_right = way:get_value_by_key("cycleway:right") + data.duration = way:get_value_by_key("duration") + data.service = way:get_value_by_key("service") + data.foot = way:get_value_by_key("foot") + data.foot_forward = way:get_value_by_key("foot:forward") + data.foot_backward = way:get_value_by_key("foot:backward") + data.bicycle = way:get_value_by_key("bicycle") + + speed_handler(profile,way,result,data) + + oneway_handler(profile,way,result,data) + + cycleway_handler(profile,way,result,data) + + bike_push_handler(profile,way,result,data) + + + -- maxspeed + limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward ) + + -- not routable if no speed assigned + -- this avoid assertions in debug builds + if result.forward_speed <= 0 and result.duration <= 0 then + result.forward_mode = mode.inaccessible + end + if result.backward_speed <= 0 and result.duration <= 0 then + 
result.backward_mode = mode.inaccessible + end + + safety_handler(profile,way,result,data) +end + + + +function speed_handler(profile,way,result,data) + + data.way_type_allows_pushing = false + + -- speed + local bridge_speed = profile.bridge_speeds[data.bridge] + if (bridge_speed and bridge_speed > 0) then + data.highway = data.bridge + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( parseDuration(data.duration), 1 ) + end + result.forward_speed = bridge_speed + result.backward_speed = bridge_speed + data.way_type_allows_pushing = true + elseif profile.route_speeds[data.route] then + -- ferries (doesn't cover routes tagged using relations) + result.forward_mode = mode.ferry + result.backward_mode = mode.ferry + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( 1, parseDuration(data.duration) ) + else + result.forward_speed = profile.route_speeds[data.route] + result.backward_speed = profile.route_speeds[data.route] + end + -- railway platforms (old tagging scheme) + elseif data.railway and profile.platform_speeds[data.railway] then + result.forward_speed = profile.platform_speeds[data.railway] + result.backward_speed = profile.platform_speeds[data.railway] + data.way_type_allows_pushing = true + -- public_transport platforms (new tagging platform) + elseif data.public_transport and profile.platform_speeds[data.public_transport] then + result.forward_speed = profile.platform_speeds[data.public_transport] + result.backward_speed = profile.platform_speeds[data.public_transport] + data.way_type_allows_pushing = true + -- railways + elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then + result.forward_mode = mode.train + result.backward_mode = mode.train + result.forward_speed = profile.railway_speeds[data.railway] + result.backward_speed = profile.railway_speeds[data.railway] + elseif data.amenity and profile.amenity_speeds[data.amenity] then + -- parking areas + result.forward_speed = profile.amenity_speeds[data.amenity] + result.backward_speed = profile.amenity_speeds[data.amenity] + data.way_type_allows_pushing = true + elseif profile.bicycle_speeds[data.highway] then + -- regular ways + result.forward_speed = profile.bicycle_speeds[data.highway] + result.backward_speed = profile.bicycle_speeds[data.highway] + data.way_type_allows_pushing = true + elseif data.access and profile.access_tag_whitelist[data.access] then + -- unknown way, but valid access tag + result.forward_speed = profile.default_speed + result.backward_speed = profile.default_speed + data.way_type_allows_pushing = true + end +end + +function oneway_handler(profile,way,result,data) + -- oneway + data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway" + data.reverse = false + + if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then + -- prevent other cases + elseif data.oneway_bicycle == "-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then + -- prevent other cases + elseif data.oneway == 
"-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.implied_oneway then + result.backward_mode = mode.inaccessible + end +end + +function cycleway_handler(profile,way,result,data) + -- cycleway + data.has_cycleway_forward = false + data.has_cycleway_backward = false + data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway + + -- cycleways on normal roads + if data.is_twoway then + if data.cycleway and profile.cycleway_tags[data.cycleway] then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + end + if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then + data.has_cycleway_forward = true + end + if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then + data.has_cycleway_backward = true + end + else + local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) + local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) + local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) + + -- set cycleway even though it is an one-way if opposite is tagged + if has_twoway_cycleway then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + elseif has_opposite_cycleway then + if not data.reverse then + data.has_cycleway_backward = true + else + data.has_cycleway_forward = true + end + elseif has_oneway_cycleway then + if not data.reverse then + data.has_cycleway_forward = true + else + data.has_cycleway_backward = true + end + + end + end + + if data.has_cycleway_backward then + result.backward_mode = mode.cycling + result.backward_speed = profile.bicycle_speeds["cycleway"] + end + + if data.has_cycleway_forward then + result.forward_mode = mode.cycling + result.forward_speed = profile.bicycle_speeds["cycleway"] + end +end + +function bike_push_handler(profile,way,result,data) + -- pushing bikes - if no other mode found + if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or + result.forward_speed == -1 or result.backward_speed == -1 then + if data.foot ~= 'no' then + local push_forward_speed = nil + local push_backward_speed = nil + + if profile.pedestrian_speeds[data.highway] then + push_forward_speed = profile.pedestrian_speeds[data.highway] + push_backward_speed = profile.pedestrian_speeds[data.highway] + elseif data.man_made and profile.man_made_speeds[data.man_made] then + push_forward_speed = profile.man_made_speeds[data.man_made] + push_backward_speed = profile.man_made_speeds[data.man_made] + else + if data.foot == 'yes' then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + elseif data.foot_forward == 'yes' then + push_forward_speed = profile.walking_speed + elseif data.foot_backward == 'yes' then + push_backward_speed = profile.walking_speed 
+ elseif data.way_type_allows_pushing then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + end + end + + if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then + result.forward_mode = mode.pushing_bike + result.forward_speed = push_forward_speed + end + if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1)then + result.backward_mode = mode.pushing_bike + result.backward_speed = push_backward_speed + end + + end + + end + + -- dismount + if data.bicycle == "dismount" then + result.forward_mode = mode.pushing_bike + result.backward_mode = mode.pushing_bike + result.forward_speed = profile.walking_speed + result.backward_speed = profile.walking_speed + end +end + +function safety_handler(profile,way,result,data) + -- convert duration into cyclability + if profile.properties.weight_name == 'cyclability' then + local safety_penalty = profile.unsafe_highway_list[data.highway] or 1. + local is_unsafe = safety_penalty < 1 + + -- primaries that are one ways are probably huge primaries where the lanes need to be separated + if is_unsafe and data.highway == 'primary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.5 + end + if is_unsafe and data.highway == 'secondary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.6 + end + + local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward + local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward + local is_undesireable = data.highway == "service" and profile.service_penalties[data.service] + local forward_penalty = 1. + local backward_penalty = 1. + if forward_is_unsafe then + forward_penalty = math.min(forward_penalty, safety_penalty) + end + if backward_is_unsafe then + backward_penalty = math.min(backward_penalty, safety_penalty) + end + + if is_undesireable then + forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service]) + backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service]) + end + + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * forward_penalty + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * backward_penalty + end + if result.duration > 0 then + result.weight = result.duration / forward_penalty + end + + if data.highway == "bicycle" then + safety_bonus = safety_bonus + 0.2 + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * safety_bonus + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * safety_bonus + end + if result.duration > 0 then + result.weight = result.duration / safety_bonus + end + end + end +end + + + +function process_way(profile, way, result) + -- the initial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and initial tag check + -- is done directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. 
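-- Illustrative aside (editor's sketch, not part of the original profile):
-- a way tagged only with building=yes, for example, carries none of the
-- highway/route/railway/amenity/man_made/public_transport/bridge tags, so
-- handle_bicycle_tags (run from the handler sequence below) returns false
-- before any speed, cycleway or safety handling is done, and the way is
-- discarded early.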
+ + -- data table for storing intermediate values during processing + + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + + route = nil, + man_made = nil, + railway = nil, + amenity = nil, + public_transport = nil, + bridge = nil, + + access = nil, + + junction = nil, + maxspeed = nil, + maxspeed_forward = nil, + maxspeed_backward = nil, + barrier = nil, + oneway = nil, + oneway_bicycle = nil, + cycleway = nil, + cycleway_left = nil, + cycleway_right = nil, + duration = nil, + service = nil, + foot = nil, + foot_forward = nil, + foot_backward = nil, + bicycle = nil, + + way_type_allows_pushing = false, + has_cycleway_forward = false, + has_cycleway_backward = false, + is_twoway = true, + reverse = false, + implied_oneway = false + } + + local handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + + -- our main handler + handle_bicycle_tags, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.surface, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.classification, + + -- handle allowed start/end modes + WayHandlers.startpoint, + + -- handle roundabouts + WayHandlers.roundabouts, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set classes + WayHandlers.classes, + + -- set weight properties of the way + WayHandlers.weights + } + + WayHandlers.run(profile, way, result, data, handlers) +end + +function process_turn(profile, turn) + -- compute turn penalty as angle^2, with a left/right bias + local normalized_angle = turn.angle / 90.0 + if normalized_angle >= 0.0 then + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias + else + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + + if turn.has_traffic_light then + turn.duration = turn.duration + profile.properties.traffic_light_penalty + end + if profile.properties.weight_name == 'cyclability' then + turn.weight = turn.duration + end + if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then + turn.weight = turn.weight + profile.properties.mode_change_penalty + end +end + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/car.lua b/admiral-router/vehicles/car.lua new file mode 100644 index 0000000..0233037 --- /dev/null +++ b/admiral-router/vehicles/car.lua @@ -0,0 +1,504 @@ +-- Car profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +Relations = require("lib/relations") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Utils = require("lib/utils") +Measure = require("lib/measure") + +function setup() + return { + properties = { + max_speed_for_map_matching = 130/3.6, -- 180kmph -> m/s + -- For routing based on duration, but weighted for preferring certain roads +-- weight_name = 'routability', + -- For shortest duration without penalties for accessibility + weight_name = 'duration', + -- For 
shortest distance without penalties for accessibility +-- weight_name = 'distance', + process_call_tagless_node = false, + u_turn_penalty = 20, + continue_straight_at_waypoint = true, + use_turn_restrictions = true, + left_hand_driving = false, + traffic_light_penalty = 2, + }, + + default_mode = mode.driving, + default_speed = 28, + oneway_handling = true, + side_road_multiplier = 0.8, + turn_penalty = 7.5, + speed_reduction = 0.8, + turn_bias = 1.075, + cardinal_directions = false, + + -- Size of the vehicle, to be limited by physical restriction of the way + vehicle_height = 2.0, -- in meters, 2.0m is the height slightly above biggest SUVs + vehicle_width = 1.9, -- in meters, ways with narrow tag are considered narrower than 2.2m + + -- Size of the vehicle, to be limited mostly by legal restriction of the way + vehicle_length = 4.8, -- in meters, 4.8m is the length of large or family car + vehicle_weight = 2000, -- in kilograms + + -- a list of suffixes to suppress in name change instructions. The suffixes also include common substrings of each other + suffix_list = { + 'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea' + }, + + barrier_whitelist = Set { + 'cattle_grid', + 'border_control', + 'toll_booth', + 'sally_port', + 'gate', + 'lift_gate', + 'no', + 'entrance', + 'height_restrictor', + 'arch' + }, + + access_tag_whitelist = Set { + 'yes', + 'motorcar', + 'motor_vehicle', + 'vehicle', + 'permissive', + 'designated', + 'hov' + }, + + access_tag_blacklist = Set { + 'no', + 'delivery', + 'destination' + }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { + 'private' + }, + + restricted_access_tag_list = Set { + 'delivery', + 'destination', + }, + + access_tags_hierarchy = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle', + 'access' + }, + + service_tag_forbidden = Set { + }, + + restrictions = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle' + }, + + classes = Sequence { + 'toll', 'motorway', 'ferry', 'restricted', 'tunnel' + }, + + -- classes to support for exclude flags + excludable = Sequence { + Set {'toll'}, + Set {'motorway'}, + Set {'ferry'} + }, + + avoid = Set { + 'area', + -- 'toll', -- uncomment this to avoid tolls + 'reversible', + 'impassable', + 'hov_lanes', + 'steps', + 'construction', + 'proposed' + }, + + speeds = Sequence { + highway = { + motorway = 120, + motorway_link = 50, + trunk = 90, + trunk_link = 40, + primary = 80, + primary_link = 30, + secondary = 70, + secondary_link = 30, + tertiary = 40, + tertiary_link = 30, + unclassified = 40, + track = 30, + residential = 20, + living_street = 10, + service = 15 + } + }, + + service_penalties = { + alley = 0.5, + parking = 0.5, + parking_aisle = 0.5, + driveway = 0.5, + ["drive-through"] = 0.5, + ["drive-thru"] = 0.5 + }, + + restricted_highway_whitelist = Set { + 'motorway', + 'motorway_link', + 'trunk', + 'trunk_link', + 'primary', + 'primary_link', + 'secondary', + 'secondary_link', + 'tertiary', + 'tertiary_link', + 'residential', + 'living_street', + 'unclassified', + 'service', + 'track' + }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + route_speeds = { + ferry = 5, + shuttle_train = 10 + }, + + bridge_speeds = { + movable = 5 + }, + + -- surface/trackype/smoothness + -- values were estimated from looking at the photos at the relevant wiki pages + + -- max speed for surfaces + surface_speeds = { + asphalt = nil, -- nil mean no limit. 
removing the line has the same effect + concrete = nil, + ["concrete:plates"] = nil, + ["concrete:lanes"] = nil, + paved = nil, + + cement = 80, + compacted = 80, + fine_gravel = 80, + + paving_stones = 60, + metal = 60, + bricks = 60, + + grass = 40, + wood = 40, + sett = 40, + grass_paver = 40, + gravel = 40, + unpaved = 40, + ground = 40, + dirt = 40, + pebblestone = 40, + tartan = 40, + + cobblestone = 30, + clay = 30, + + earth = 20, + stone = 20, + rocky = 20, + sand = 20, + + mud = 10 + }, + + -- max speed for tracktypes + tracktype_speeds = { + grade1 = 60, + grade2 = 40, + grade3 = 30, + grade4 = 25, + grade5 = 20 + }, + + -- max speed for smoothnesses + smoothness_speeds = { + intermediate = 80, + bad = 40, + very_bad = 20, + horrible = 10, + very_horrible = 5, + impassable = 0 + }, + + -- http://wiki.openstreetmap.org/wiki/Speed_limits + maxspeed_table_default = { + urban = 50, + rural = 90, + trunk = 110, + motorway = 130 + }, + + -- List only exceptions + maxspeed_table = { + ["at:rural"] = 100, + ["at:trunk"] = 100, + ["be:motorway"] = 120, + ["be-bru:rural"] = 70, + ["be-bru:urban"] = 30, + ["be-vlg:rural"] = 70, + ["by:urban"] = 60, + ["by:motorway"] = 110, + ["ch:rural"] = 80, + ["ch:trunk"] = 100, + ["ch:motorway"] = 120, + ["cz:trunk"] = 0, + ["cz:motorway"] = 0, + ["de:living_street"] = 7, + ["de:rural"] = 100, + ["de:motorway"] = 0, + ["dk:rural"] = 80, + ["fr:rural"] = 80, + ["gb:nsl_single"] = (60*1609)/1000, + ["gb:nsl_dual"] = (70*1609)/1000, + ["gb:motorway"] = (70*1609)/1000, + ["nl:rural"] = 80, + ["nl:trunk"] = 100, + ['no:rural'] = 80, + ['no:motorway'] = 110, + ['pl:rural'] = 100, + ['pl:trunk'] = 120, + ['pl:motorway'] = 140, + ["ro:trunk"] = 100, + ["ru:living_street"] = 20, + ["ru:urban"] = 60, + ["ru:motorway"] = 110, + ["uk:nsl_single"] = (60*1609)/1000, + ["uk:nsl_dual"] = (70*1609)/1000, + ["uk:motorway"] = (70*1609)/1000, + ['za:urban'] = 60, + ['za:rural'] = 100, + ["none"] = 140 + }, + + relation_types = Sequence { + "route" + }, + + -- classify highway tags when necessary for turn weights + highway_turn_classification = { + }, + + -- classify access tags when necessary for turn weights + access_turn_classification = { + } + } +end + +function process_node(profile, node, result, relations) + -- parse access and barrier tags + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access then + if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier then + -- check height restriction barriers + local restricted_by_height = false + if barrier == 'height_restrictor' then + local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node) + restricted_by_height = maxheight and maxheight < profile.vehicle_height + end + + -- make an exception for rising bollard barriers + local bollard = node:get_value_by_key("bollard") + local rising_bollard = bollard and "rising" == bollard + + -- make an exception for lowered/flat barrier=kerb + -- and incorrect tagging of highway crossing kerb as highway barrier + local kerb = node:get_value_by_key("kerb") + local highway = node:get_value_by_key("highway") + local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb) + local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing" + + if not profile.barrier_whitelist[barrier] + and not rising_bollard + and not flat_kerb + and not highway_crossing_kerb + or 
restricted_by_height then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function process_way(profile, way, result, relations) + -- the intial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and intial tag check + -- is done in directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. + + -- data table for storing intermediate values during processing + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + bridge = way:get_value_by_key('bridge'), + route = way:get_value_by_key('route') + } + + -- perform an quick initial check and abort if the way is + -- obviously not routable. + -- highway or route tags must be in data table, bridge is optional + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') + then + return + end + + handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + WayHandlers.avoid_ways, + WayHandlers.handle_height, + WayHandlers.handle_width, + WayHandlers.handle_length, + WayHandlers.handle_weight, + + -- determine access status by checking our hierarchy of + -- access tags, e.g: motorcar, motor_vehicle, vehicle + WayHandlers.access, + + -- check whether forward/backward directions are routable + WayHandlers.oneway, + + -- check a road's destination + WayHandlers.destinations, + + -- check whether we're using a special transport mode + WayHandlers.ferries, + WayHandlers.movables, + + -- handle service road restrictions + WayHandlers.service, + + -- handle hov + WayHandlers.hov, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.speed, + WayHandlers.maxspeed, + WayHandlers.surface, + WayHandlers.penalties, + + -- compute class labels + WayHandlers.classes, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.turn_lanes, + WayHandlers.classification, + + -- handle various other flags + WayHandlers.roundabouts, + WayHandlers.startpoint, + WayHandlers.driving_side, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set weight properties of the way + WayHandlers.weights, + + -- set classification of ways relevant for turns + WayHandlers.way_classification_for_turn + } + + WayHandlers.run(profile, way, result, data, handlers, relations) + + if profile.cardinal_directions then + Relations.process_way_refs(way, relations, result) + end +end + +function process_turn(profile, turn) + -- Use a sigmoid function to return a penalty that maxes out at turn_penalty + -- over the space of 0-180 degrees. Values here were chosen by fitting + -- the function to some turn penalty samples from real driving. + local turn_penalty = profile.turn_penalty + local turn_bias = turn.is_left_hand_driving and 1. 
/ profile.turn_bias or profile.turn_bias + + if turn.has_traffic_light then + turn.duration = profile.properties.traffic_light_penalty + end + + if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then + if turn.angle >= 0 then + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias))) + else + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias))) + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + end + + -- for distance based routing we don't want to have penalties based on turn angle + if profile.properties.weight_name == 'distance' then + turn.weight = 0 + else + turn.weight = turn.duration + end + + if profile.properties.weight_name == 'routability' then + -- penalize turns from non-local access only segments onto local access only tags + if not turn.source_restricted and turn.target_restricted then + turn.weight = constants.max_turn_weight + end + end +end + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/ev.lua b/admiral-router/vehicles/ev.lua new file mode 100644 index 0000000..1e7538b --- /dev/null +++ b/admiral-router/vehicles/ev.lua @@ -0,0 +1,504 @@ +-- Car profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +Relations = require("lib/relations") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Utils = require("lib/utils") +Measure = require("lib/measure") + +function setup() + return { + properties = { + max_speed_for_map_matching = 45/3.6, -- 180kmph -> m/s + -- For routing based on duration, but weighted for preferring certain roads +-- weight_name = 'routability', + -- For shortest duration without penalties for accessibility + weight_name = 'duration', + -- For shortest distance without penalties for accessibility + -- weight_name = 'distance', + process_call_tagless_node = false, + u_turn_penalty = 20, + continue_straight_at_waypoint = true, + use_turn_restrictions = true, + left_hand_driving = false, + traffic_light_penalty = 2, + }, + + default_mode = mode.driving, + default_speed = 20, + oneway_handling = true, + side_road_multiplier = 0.9, + turn_penalty = 4, + speed_reduction = 0.9, + turn_bias = 1.05, + cardinal_directions = false, + + -- Size of the vehicle, to be limited by physical restriction of the way + vehicle_height = 1.5, -- in meters, 2.0m is the height slightly above biggest SUVs + vehicle_width = 1.0, -- in meters, ways with narrow tag are considered narrower than 2.2m + + -- Size of the vehicle, to be limited mostly by legal restriction of the way + vehicle_length = 2, -- in meters, 4.8m is the length of large or family car + vehicle_weight = 200, -- in kilograms + + -- a list of suffixes to suppress in name change instructions. 
The suffixes also include common substrings of each other + suffix_list = { + 'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea' + }, + + barrier_whitelist = Set { + 'cattle_grid', + 'border_control', + 'toll_booth', + 'sally_port', + 'gate', + 'lift_gate', + 'no', + 'entrance', + 'height_restrictor', + 'arch' + }, + + access_tag_whitelist = Set { + 'yes', + 'motorcar', + "motorcycle", + 'motor_vehicle', + 'vehicle', + 'permissive', + 'designated', + 'hov' + }, + + access_tag_blacklist = Set { + 'no', + 'destination' + }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { + 'private' + }, + + restricted_access_tag_list = Set { + 'destination', + }, + + access_tags_hierarchy = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle', + 'access' + }, + + service_tag_forbidden = Set { + }, + + restrictions = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle' + }, + + classes = Sequence { + 'toll', 'motorway', 'ferry', 'restricted', 'tunnel' + }, + + -- classes to support for exclude flags + excludable = Sequence { + Set {'toll'}, + Set {'motorway'}, + Set {'ferry'} + }, + + avoid = Set { + 'area', + -- 'toll', -- uncomment this to avoid tolls + 'reversible', + 'impassable', + 'hov_lanes', + 'steps', + 'construction', + 'proposed' + }, + + speeds = Sequence { + highway = { + motorway = 45, + motorway_link = 45, + trunk = 45, + trunk_link = 45, + primary = 45, + primary_link = 30, + secondary = 45, + secondary_link = 30, + tertiary = 25, + tertiary_link = 25, + unclassified = 25, + track = 20, + residential = 14, + living_street = 10, + service = 10 + } + }, + + service_penalties = { + alley = 0.5, + parking = 0.5, + parking_aisle = 0.5, + driveway = 0.5, + ["drive-through"] = 0.5, + ["drive-thru"] = 0.5 + }, + + restricted_highway_whitelist = Set { + 'motorway', + 'motorway_link', + 'trunk', + 'trunk_link', + 'primary', + 'primary_link', + 'secondary', + 'secondary_link', + 'tertiary', + 'tertiary_link', + 'residential', + 'living_street', + 'unclassified', + 'service', + 'track' + }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + route_speeds = { + ferry = 5, + shuttle_train = 10 + }, + + bridge_speeds = { + movable = 5 + }, + + -- surface/trackype/smoothness + -- values were estimated from looking at the photos at the relevant wiki pages + + -- max speed for surfaces + surface_speeds = { + asphalt = nil, -- nil mean no limit. 
removing the line has the same effect + concrete = nil, + ["concrete:plates"] = nil, + ["concrete:lanes"] = nil, + paved = nil, + + cement = 80, + compacted = 80, + fine_gravel = 80, + + paving_stones = 60, + metal = 60, + bricks = 60, + + grass = 40, + wood = 40, + sett = 40, + grass_paver = 40, + gravel = 40, + unpaved = 40, + ground = 40, + dirt = 40, + pebblestone = 40, + tartan = 40, + + cobblestone = 30, + clay = 30, + + earth = 20, + stone = 20, + rocky = 20, + sand = 20, + + mud = 10 + }, + + -- max speed for tracktypes + tracktype_speeds = { + grade1 = 60, + grade2 = 40, + grade3 = 30, + grade4 = 25, + grade5 = 20 + }, + + -- max speed for smoothnesses + smoothness_speeds = { + intermediate = 80, + bad = 40, + very_bad = 20, + horrible = 10, + very_horrible = 5, + impassable = 0 + }, + + -- http://wiki.openstreetmap.org/wiki/Speed_limits + maxspeed_table_default = { + urban = 50, + rural = 90, + trunk = 100, + motorway = 100 + }, + + -- List only exceptions + maxspeed_table = { + ["at:rural"] = 100, + ["at:trunk"] = 100, + ["be:motorway"] = 120, + ["be-bru:rural"] = 70, + ["be-bru:urban"] = 30, + ["be-vlg:rural"] = 70, + ["by:urban"] = 60, + ["by:motorway"] = 100, + ["ch:rural"] = 80, + ["ch:trunk"] = 100, + ["ch:motorway"] = 100, + ["cz:trunk"] = 0, + ["cz:motorway"] = 0, + ["de:living_street"] = 7, + ["de:rural"] = 100, + ["de:motorway"] = 0, + ["dk:rural"] = 80, + ["fr:rural"] = 80, + ["gb:nsl_single"] = (60*1609)/1000, + ["gb:nsl_dual"] = (70*1609)/1000, + ["gb:motorway"] = (70*1609)/1000, + ["nl:rural"] = 80, + ["nl:trunk"] = 100, + ['no:rural'] = 80, + ['no:motorway'] = 100, + ['pl:rural'] = 100, + ['pl:trunk'] = 100, + ['pl:motorway'] = 100, + ["ro:trunk"] = 100, + ["ru:living_street"] = 20, + ["ru:urban"] = 60, + ["ru:motorway"] = 100, + ["uk:nsl_single"] = (60*1609)/1000, + ["uk:nsl_dual"] = (70*1609)/1000, + ["uk:motorway"] = (70*1609)/1000, + ['za:urban'] = 60, + ['za:rural'] = 100, + ["none"] = 100 + }, + + relation_types = Sequence { + "route" + }, + + -- classify highway tags when necessary for turn weights + highway_turn_classification = { + }, + + -- classify access tags when necessary for turn weights + access_turn_classification = { + } + } +end + +function process_node(profile, node, result, relations) + -- parse access and barrier tags + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access then + if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier then + -- check height restriction barriers + local restricted_by_height = false + if barrier == 'height_restrictor' then + local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node) + restricted_by_height = maxheight and maxheight < profile.vehicle_height + end + + -- make an exception for rising bollard barriers + local bollard = node:get_value_by_key("bollard") + local rising_bollard = bollard and "rising" == bollard + + -- make an exception for lowered/flat barrier=kerb + -- and incorrect tagging of highway crossing kerb as highway barrier + local kerb = node:get_value_by_key("kerb") + local highway = node:get_value_by_key("highway") + local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb) + local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing" + + if not profile.barrier_whitelist[barrier] + and not rising_bollard + and not flat_kerb + and not highway_crossing_kerb + or 
restricted_by_height then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function process_way(profile, way, result, relations) + -- the intial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and intial tag check + -- is done in directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. + + -- data table for storing intermediate values during processing + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + bridge = way:get_value_by_key('bridge'), + route = way:get_value_by_key('route') + } + + -- perform an quick initial check and abort if the way is + -- obviously not routable. + -- highway or route tags must be in data table, bridge is optional + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') + then + return + end + + handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + WayHandlers.avoid_ways, + WayHandlers.handle_height, + WayHandlers.handle_width, + WayHandlers.handle_length, + WayHandlers.handle_weight, + + -- determine access status by checking our hierarchy of + -- access tags, e.g: motorcar, motor_vehicle, vehicle + WayHandlers.access, + + -- check whether forward/backward directions are routable + WayHandlers.oneway, + + -- check a road's destination + WayHandlers.destinations, + + -- check whether we're using a special transport mode + WayHandlers.ferries, + WayHandlers.movables, + + -- handle service road restrictions + WayHandlers.service, + + -- handle hov + WayHandlers.hov, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.speed, + WayHandlers.maxspeed, + WayHandlers.surface, + WayHandlers.penalties, + + -- compute class labels + WayHandlers.classes, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.turn_lanes, + WayHandlers.classification, + + -- handle various other flags + WayHandlers.roundabouts, + WayHandlers.startpoint, + WayHandlers.driving_side, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set weight properties of the way + WayHandlers.weights, + + -- set classification of ways relevant for turns + WayHandlers.way_classification_for_turn + } + + WayHandlers.run(profile, way, result, data, handlers, relations) + + if profile.cardinal_directions then + Relations.process_way_refs(way, relations, result) + end +end + +function process_turn(profile, turn) + -- Use a sigmoid function to return a penalty that maxes out at turn_penalty + -- over the space of 0-180 degrees. Values here were chosen by fitting + -- the function to some turn penalty samples from real driving. + local turn_penalty = profile.turn_penalty + local turn_bias = turn.is_left_hand_driving and 1. 
/ profile.turn_bias or profile.turn_bias + + if turn.has_traffic_light then + turn.duration = profile.properties.traffic_light_penalty + end + + if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then + if turn.angle >= 0 then + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias))) + else + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias))) + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + end + + -- for distance based routing we don't want to have penalties based on turn angle + if profile.properties.weight_name == 'distance' then + turn.weight = 0 + else + turn.weight = turn.duration + end + + if profile.properties.weight_name == 'routability' then + -- penalize turns from non-local access only segments onto local access only tags + if not turn.source_restricted and turn.target_restricted then + turn.weight = constants.max_turn_weight + end + end +end + + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/km.lua b/admiral-router/vehicles/km.lua new file mode 100644 index 0000000..adfd741 --- /dev/null +++ b/admiral-router/vehicles/km.lua @@ -0,0 +1,683 @@ +-- Bicycle profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Measure = require("lib/measure") + +function setup() + local max_speed = 50 + local default_speed = 22 + local walking_speed = 5 + + return { + properties = { + u_turn_penalty = 20, + traffic_light_penalty = 2, + --weight_name = 'cyclability', + weight_name = 'duration', +-- weight_name = 'distance', + process_call_tagless_node = false, + max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s + use_turn_restrictions = false, + continue_straight_at_waypoint = false, + mode_change_penalty = 30, + }, + + default_mode = mode.cycling, + default_speed = default_speed, + walking_speed = walking_speed, + oneway_handling = true, + turn_penalty = 6, + turn_bias = 1.4, + use_public_transport = true, + + allowed_start_modes = Set { + mode.cycling, + mode.pushing_bike + }, + + barrier_blacklist = Set { + 'yes', + 'wall', + 'fence' + }, + + access_tag_whitelist = Set { + 'yes', + 'permissive', + 'designated' + }, + + access_tag_blacklist = Set { + 'no', + -- When a way is tagged with `use_sidepath` a parallel way suitable for + -- cyclists is mapped and must be used instead (by law). This tag is + -- used on ways that normally may be used by cyclists, but not when + -- a signposted parallel cycleway is available. For purposes of routing + -- cyclists, this value should be treated as 'no access for bicycles'. 
+ 'use_sidepath' + }, + + restricted_access_tag_list = Set { }, + + restricted_highway_whitelist = Set { }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + access_tags_hierarchy = Sequence { + 'bicycle', + 'vehicle', + 'access' + }, + + restrictions = Set { + 'bicycle' + }, + + cycleway_tags = Set { + 'track', + 'lane', + 'share_busway', + 'sharrow', + 'shared', + 'shared_lane' + }, + + opposite_cycleway_tags = Set { + 'opposite', + 'opposite_lane', + 'opposite_track', + }, + + -- reduce the driving speed by 30% for unsafe roads + -- only used for cyclability metric + unsafe_highway_list = { + primary = 0.5, + secondary = 0.65, + tertiary = 0.8, + primary_link = 0.5, + secondary_link = 0.65, + tertiary_link = 0.8, + }, + + service_penalties = { + alley = 0.5, + }, + + bicycle_speeds = { + cycleway = default_speed, + primary = 45, + primary_link = 30, + secondary = 45, + secondary_link = 30, + tertiary = 25, + tertiary_link = 25, + residential = 14, + unclassified = 25, + living_street = 10, + road = default_speed, + service = 10, + track = 12, + path = 12 + }, + + pedestrian_speeds = { + footway = walking_speed, + pedestrian = walking_speed, + steps = 2 + }, + + railway_speeds = { + train = 10, + railway = 10, + subway = 10, + light_rail = 10, + monorail = 10, + tram = 10 + }, + + platform_speeds = { + platform = walking_speed + }, + + amenity_speeds = { + parking = 10, + parking_entrance = 10 + }, + + man_made_speeds = { + pier = walking_speed + }, + + route_speeds = { + ferry = 5 + }, + + bridge_speeds = { + movable = 5 + }, + + surface_speeds = { + asphalt = default_speed, + ["cobblestone:flattened"] = 10, + paving_stones = 10, + compacted = 10, + cobblestone = 6, + unpaved = 6, + fine_gravel = 6, + gravel = 6, + pebblestone = 6, + ground = 6, + dirt = 6, + earth = 6, + grass = 6, + mud = 3, + sand = 3, + sett = 10 + }, + + classes = Sequence { + 'ferry', 'tunnel' + }, + + -- Which classes should be excludable + -- This increases memory usage so its disabled by default. 
+ excludable = Sequence { +-- Set {'ferry'} + }, + + tracktype_speeds = { + }, + + smoothness_speeds = { + }, + + avoid = Set { + 'impassable', + 'construction' + } + } +end + +function process_node(profile, node, result) + -- parse access and barrier tags + local highway = node:get_value_by_key("highway") + local is_crossing = highway and highway == "crossing" + + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access and access ~= "" then + -- access restrictions on crossing nodes are not relevant for + -- the traffic on the road + if profile.access_tag_blacklist[access] and not is_crossing then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier and "" ~= barrier then + if profile.barrier_blacklist[barrier] then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if tag and "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function handle_bicycle_tags(profile,way,result,data) + -- initial routability check, filters out buildings, boundaries, etc + data.route = way:get_value_by_key("route") + data.man_made = way:get_value_by_key("man_made") + data.railway = way:get_value_by_key("railway") + data.amenity = way:get_value_by_key("amenity") + data.public_transport = way:get_value_by_key("public_transport") + data.bridge = way:get_value_by_key("bridge") + + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') and + (not profile.use_public_transport or not data.railway or data.railway=='') and + (not data.amenity or data.amenity=='') and + (not data.man_made or data.man_made=='') and + (not data.public_transport or data.public_transport=='') and + (not data.bridge or data.bridge=='') + then + return false + end + + -- access + data.access = find_access_tag(way, profile.access_tags_hierarchy) + if data.access and profile.access_tag_blacklist[data.access] then + return false + end + + -- other tags + data.junction = way:get_value_by_key("junction") + data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0 + data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0 + data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0 + data.barrier = way:get_value_by_key("barrier") + data.oneway = way:get_value_by_key("oneway") + data.oneway_bicycle = way:get_value_by_key("oneway:bicycle") + data.cycleway = way:get_value_by_key("cycleway") + data.cycleway_left = way:get_value_by_key("cycleway:left") + data.cycleway_right = way:get_value_by_key("cycleway:right") + data.duration = way:get_value_by_key("duration") + data.service = way:get_value_by_key("service") + data.foot = way:get_value_by_key("foot") + data.foot_forward = way:get_value_by_key("foot:forward") + data.foot_backward = way:get_value_by_key("foot:backward") + data.bicycle = way:get_value_by_key("bicycle") + + speed_handler(profile,way,result,data) + + oneway_handler(profile,way,result,data) + + cycleway_handler(profile,way,result,data) + + bike_push_handler(profile,way,result,data) + + + -- maxspeed + limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward ) + + -- not routable if no speed assigned + -- this avoid assertions in debug builds + if result.forward_speed <= 0 and result.duration <= 0 then + result.forward_mode = mode.inaccessible + end + if result.backward_speed <= 0 and result.duration <= 0 then + 
result.backward_mode = mode.inaccessible + end + + safety_handler(profile,way,result,data) +end + + + +function speed_handler(profile,way,result,data) + + data.way_type_allows_pushing = false + + -- speed + local bridge_speed = profile.bridge_speeds[data.bridge] + if (bridge_speed and bridge_speed > 0) then + data.highway = data.bridge + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( parseDuration(data.duration), 1 ) + end + result.forward_speed = bridge_speed + result.backward_speed = bridge_speed + data.way_type_allows_pushing = true + elseif profile.route_speeds[data.route] then + -- ferries (doesn't cover routes tagged using relations) + result.forward_mode = mode.ferry + result.backward_mode = mode.ferry + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( 1, parseDuration(data.duration) ) + else + result.forward_speed = profile.route_speeds[data.route] + result.backward_speed = profile.route_speeds[data.route] + end + -- railway platforms (old tagging scheme) + elseif data.railway and profile.platform_speeds[data.railway] then + result.forward_speed = profile.platform_speeds[data.railway] + result.backward_speed = profile.platform_speeds[data.railway] + data.way_type_allows_pushing = true + -- public_transport platforms (new tagging platform) + elseif data.public_transport and profile.platform_speeds[data.public_transport] then + result.forward_speed = profile.platform_speeds[data.public_transport] + result.backward_speed = profile.platform_speeds[data.public_transport] + data.way_type_allows_pushing = true + -- railways + elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then + result.forward_mode = mode.train + result.backward_mode = mode.train + result.forward_speed = profile.railway_speeds[data.railway] + result.backward_speed = profile.railway_speeds[data.railway] + elseif data.amenity and profile.amenity_speeds[data.amenity] then + -- parking areas + result.forward_speed = profile.amenity_speeds[data.amenity] + result.backward_speed = profile.amenity_speeds[data.amenity] + data.way_type_allows_pushing = true + elseif profile.bicycle_speeds[data.highway] then + -- regular ways + result.forward_speed = profile.bicycle_speeds[data.highway] + result.backward_speed = profile.bicycle_speeds[data.highway] + data.way_type_allows_pushing = true + elseif data.access and profile.access_tag_whitelist[data.access] then + -- unknown way, but valid access tag + result.forward_speed = profile.default_speed + result.backward_speed = profile.default_speed + data.way_type_allows_pushing = true + end +end + +function oneway_handler(profile,way,result,data) + -- oneway + data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway" + data.reverse = false + + if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then + -- prevent other cases + elseif data.oneway_bicycle == "-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then + -- prevent other cases + elseif data.oneway == 
"-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.implied_oneway then + result.backward_mode = mode.inaccessible + end +end + +function cycleway_handler(profile,way,result,data) + -- cycleway + data.has_cycleway_forward = false + data.has_cycleway_backward = false + data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway + + -- cycleways on normal roads + if data.is_twoway then + if data.cycleway and profile.cycleway_tags[data.cycleway] then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + end + if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then + data.has_cycleway_forward = true + end + if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then + data.has_cycleway_backward = true + end + else + local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) + local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) + local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) + + -- set cycleway even though it is an one-way if opposite is tagged + if has_twoway_cycleway then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + elseif has_opposite_cycleway then + if not data.reverse then + data.has_cycleway_backward = true + else + data.has_cycleway_forward = true + end + elseif has_oneway_cycleway then + if not data.reverse then + data.has_cycleway_forward = true + else + data.has_cycleway_backward = true + end + + end + end + + if data.has_cycleway_backward then + result.backward_mode = mode.cycling + result.backward_speed = profile.bicycle_speeds["cycleway"] + end + + if data.has_cycleway_forward then + result.forward_mode = mode.cycling + result.forward_speed = profile.bicycle_speeds["cycleway"] + end +end + +function bike_push_handler(profile,way,result,data) + -- pushing bikes - if no other mode found + if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or + result.forward_speed == -1 or result.backward_speed == -1 then + if data.foot ~= 'no' then + local push_forward_speed = nil + local push_backward_speed = nil + + if profile.pedestrian_speeds[data.highway] then + push_forward_speed = profile.pedestrian_speeds[data.highway] + push_backward_speed = profile.pedestrian_speeds[data.highway] + elseif data.man_made and profile.man_made_speeds[data.man_made] then + push_forward_speed = profile.man_made_speeds[data.man_made] + push_backward_speed = profile.man_made_speeds[data.man_made] + else + if data.foot == 'yes' then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + elseif data.foot_forward == 'yes' then + push_forward_speed = profile.walking_speed + elseif data.foot_backward == 'yes' then + push_backward_speed = profile.walking_speed 
+ elseif data.way_type_allows_pushing then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + end + end + + if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then + result.forward_mode = mode.pushing_bike + result.forward_speed = push_forward_speed + end + if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1)then + result.backward_mode = mode.pushing_bike + result.backward_speed = push_backward_speed + end + + end + + end + + -- dismount + if data.bicycle == "dismount" then + result.forward_mode = mode.pushing_bike + result.backward_mode = mode.pushing_bike + result.forward_speed = profile.walking_speed + result.backward_speed = profile.walking_speed + end +end + +function safety_handler(profile,way,result,data) + -- convert duration into cyclability + if profile.properties.weight_name == 'cyclability' then + local safety_penalty = profile.unsafe_highway_list[data.highway] or 1. + local is_unsafe = safety_penalty < 1 + + -- primaries that are one ways are probably huge primaries where the lanes need to be separated + if is_unsafe and data.highway == 'primary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.5 + end + if is_unsafe and data.highway == 'secondary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.6 + end + + local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward + local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward + local is_undesireable = data.highway == "service" and profile.service_penalties[data.service] + local forward_penalty = 1. + local backward_penalty = 1. + if forward_is_unsafe then + forward_penalty = math.min(forward_penalty, safety_penalty) + end + if backward_is_unsafe then + backward_penalty = math.min(backward_penalty, safety_penalty) + end + + if is_undesireable then + forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service]) + backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service]) + end + + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * forward_penalty + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * backward_penalty + end + if result.duration > 0 then + result.weight = result.duration / forward_penalty + end + + if data.highway == "bicycle" then + safety_bonus = safety_bonus + 0.2 + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * safety_bonus + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * safety_bonus + end + if result.duration > 0 then + result.weight = result.duration / safety_bonus + end + end + end +end + + + +function process_way(profile, way, result) + -- the initial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and initial tag check + -- is done directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. 
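+  -- Illustrative example (added note, not part of upstream OSRM): a way tagged
+  -- only with building=yes has none of the routable top-level tags, so
+  -- handle_bicycle_tags() returns false immediately and the remaining, more
+  -- expensive handlers never run for it.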
+ + -- data table for storing intermediate values during processing + + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + + route = nil, + man_made = nil, + railway = nil, + amenity = nil, + public_transport = nil, + bridge = nil, + + access = nil, + + junction = nil, + maxspeed = nil, + maxspeed_forward = nil, + maxspeed_backward = nil, + barrier = nil, + oneway = nil, + oneway_bicycle = nil, + cycleway = nil, + cycleway_left = nil, + cycleway_right = nil, + duration = nil, + service = nil, + foot = nil, + foot_forward = nil, + foot_backward = nil, + bicycle = nil, + + way_type_allows_pushing = false, + has_cycleway_forward = false, + has_cycleway_backward = false, + is_twoway = true, + reverse = false, + implied_oneway = false + } + + local handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + + -- our main handler + handle_bicycle_tags, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.surface, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.classification, + + -- handle allowed start/end modes + WayHandlers.startpoint, + + -- handle roundabouts + WayHandlers.roundabouts, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set classes + WayHandlers.classes, + + -- set weight properties of the way + WayHandlers.weights + } + + WayHandlers.run(profile, way, result, data, handlers) +end + +function process_turn(profile, turn) + -- compute turn penalty as angle^2, with a left/right bias + local normalized_angle = turn.angle / 90.0 + if normalized_angle >= 0.0 then + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias + else + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + + if turn.has_traffic_light then + turn.duration = turn.duration + profile.properties.traffic_light_penalty + end + if profile.properties.weight_name == 'cyclability' then + turn.weight = turn.duration + end + if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then + turn.weight = turn.weight + profile.properties.mode_change_penalty + end +end + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/kpm.lua b/admiral-router/vehicles/kpm.lua new file mode 100644 index 0000000..6ddcc92 --- /dev/null +++ b/admiral-router/vehicles/kpm.lua @@ -0,0 +1,687 @@ +-- Bicycle profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Measure = require("lib/measure") + +function setup() + local max_speed = 25 + local default_speed = 17 + local walking_speed = 5 + + return { + properties = { + u_turn_penalty = 20, + traffic_light_penalty = 2, + --weight_name = 'cyclability', + weight_name = 'duration', +-- weight_name = 'distance', + process_call_tagless_node = false, + max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s + 
use_turn_restrictions = false, + continue_straight_at_waypoint = false, + mode_change_penalty = 30, + }, + + default_mode = mode.cycling, + default_speed = default_speed, + walking_speed = walking_speed, + oneway_handling = true, + turn_penalty = 6, + turn_bias = 1.4, + use_public_transport = true, + + allowed_start_modes = Set { + mode.cycling, + mode.pushing_bike + }, + + barrier_blacklist = Set { + 'yes', + 'wall', + 'fence' + }, + + access_tag_whitelist = Set { + 'yes', + 'permissive', + 'designated' + }, + + access_tag_blacklist = Set { + 'no', + 'private', + 'agricultural', + 'forestry', + 'delivery', + -- When a way is tagged with `use_sidepath` a parallel way suitable for + -- cyclists is mapped and must be used instead (by law). This tag is + -- used on ways that normally may be used by cyclists, but not when + -- a signposted parallel cycleway is available. For purposes of routing + -- cyclists, this value should be treated as 'no access for bicycles'. + 'use_sidepath' + }, + + restricted_access_tag_list = Set { }, + + restricted_highway_whitelist = Set { }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + access_tags_hierarchy = Sequence { + 'bicycle', + 'vehicle', + 'access' + }, + + restrictions = Set { + 'bicycle' + }, + + cycleway_tags = Set { + 'track', + 'lane', + 'share_busway', + 'sharrow', + 'shared', + 'shared_lane' + }, + + opposite_cycleway_tags = Set { + 'opposite', + 'opposite_lane', + 'opposite_track', + }, + + -- reduce the driving speed by 30% for unsafe roads + -- only used for cyclability metric + unsafe_highway_list = { + primary = 0.5, + secondary = 0.65, + tertiary = 0.8, + primary_link = 0.5, + secondary_link = 0.65, + tertiary_link = 0.8, + }, + + service_penalties = { + alley = 0.5, + }, + + bicycle_speeds = { + cycleway = default_speed, + primary = 25, + primary_link = 25, + secondary = 25, + secondary_link = 25, + tertiary = 20, + tertiary_link = 20, + residential = 12, + unclassified = 20, + living_street = 10, + road = default_speed, + service = 10, + track = 12, + path = 12 + }, + + pedestrian_speeds = { + footway = walking_speed, + pedestrian = walking_speed, + steps = 2 + }, + + railway_speeds = { + train = 10, + railway = 10, + subway = 10, + light_rail = 10, + monorail = 10, + tram = 10 + }, + + platform_speeds = { + platform = walking_speed + }, + + amenity_speeds = { + parking = 10, + parking_entrance = 10 + }, + + man_made_speeds = { + pier = walking_speed + }, + + route_speeds = { + ferry = 5 + }, + + bridge_speeds = { + movable = 5 + }, + + surface_speeds = { + asphalt = default_speed, + ["cobblestone:flattened"] = 10, + paving_stones = 10, + compacted = 10, + cobblestone = 6, + unpaved = 6, + fine_gravel = 6, + gravel = 6, + pebblestone = 6, + ground = 6, + dirt = 6, + earth = 6, + grass = 6, + mud = 3, + sand = 3, + sett = 10 + }, + + classes = Sequence { + 'ferry', 'tunnel' + }, + + -- Which classes should be excludable + -- This increases memory usage so its disabled by default. 
+ excludable = Sequence { +-- Set {'ferry'} + }, + + tracktype_speeds = { + }, + + smoothness_speeds = { + }, + + avoid = Set { + 'impassable', + 'construction' + } + } +end + +function process_node(profile, node, result) + -- parse access and barrier tags + local highway = node:get_value_by_key("highway") + local is_crossing = highway and highway == "crossing" + + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access and access ~= "" then + -- access restrictions on crossing nodes are not relevant for + -- the traffic on the road + if profile.access_tag_blacklist[access] and not is_crossing then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier and "" ~= barrier then + if profile.barrier_blacklist[barrier] then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if tag and "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function handle_bicycle_tags(profile,way,result,data) + -- initial routability check, filters out buildings, boundaries, etc + data.route = way:get_value_by_key("route") + data.man_made = way:get_value_by_key("man_made") + data.railway = way:get_value_by_key("railway") + data.amenity = way:get_value_by_key("amenity") + data.public_transport = way:get_value_by_key("public_transport") + data.bridge = way:get_value_by_key("bridge") + + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') and + (not profile.use_public_transport or not data.railway or data.railway=='') and + (not data.amenity or data.amenity=='') and + (not data.man_made or data.man_made=='') and + (not data.public_transport or data.public_transport=='') and + (not data.bridge or data.bridge=='') + then + return false + end + + -- access + data.access = find_access_tag(way, profile.access_tags_hierarchy) + if data.access and profile.access_tag_blacklist[data.access] then + return false + end + + -- other tags + data.junction = way:get_value_by_key("junction") + data.maxspeed = Measure.get_max_speed(way:get_value_by_key ("maxspeed")) or 0 + data.maxspeed_forward = Measure.get_max_speed(way:get_value_by_key("maxspeed:forward")) or 0 + data.maxspeed_backward = Measure.get_max_speed(way:get_value_by_key("maxspeed:backward")) or 0 + data.barrier = way:get_value_by_key("barrier") + data.oneway = way:get_value_by_key("oneway") + data.oneway_bicycle = way:get_value_by_key("oneway:bicycle") + data.cycleway = way:get_value_by_key("cycleway") + data.cycleway_left = way:get_value_by_key("cycleway:left") + data.cycleway_right = way:get_value_by_key("cycleway:right") + data.duration = way:get_value_by_key("duration") + data.service = way:get_value_by_key("service") + data.foot = way:get_value_by_key("foot") + data.foot_forward = way:get_value_by_key("foot:forward") + data.foot_backward = way:get_value_by_key("foot:backward") + data.bicycle = way:get_value_by_key("bicycle") + + speed_handler(profile,way,result,data) + + oneway_handler(profile,way,result,data) + + cycleway_handler(profile,way,result,data) + + bike_push_handler(profile,way,result,data) + + + -- maxspeed + limit( result, data.maxspeed, data.maxspeed_forward, data.maxspeed_backward ) + + -- not routable if no speed assigned + -- this avoid assertions in debug builds + if result.forward_speed <= 0 and result.duration <= 0 then + result.forward_mode = mode.inaccessible + end + if result.backward_speed <= 0 and result.duration <= 0 then + 
result.backward_mode = mode.inaccessible + end + + safety_handler(profile,way,result,data) +end + + + +function speed_handler(profile,way,result,data) + + data.way_type_allows_pushing = false + + -- speed + local bridge_speed = profile.bridge_speeds[data.bridge] + if (bridge_speed and bridge_speed > 0) then + data.highway = data.bridge + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( parseDuration(data.duration), 1 ) + end + result.forward_speed = bridge_speed + result.backward_speed = bridge_speed + data.way_type_allows_pushing = true + elseif profile.route_speeds[data.route] then + -- ferries (doesn't cover routes tagged using relations) + result.forward_mode = mode.ferry + result.backward_mode = mode.ferry + if data.duration and durationIsValid(data.duration) then + result.duration = math.max( 1, parseDuration(data.duration) ) + else + result.forward_speed = profile.route_speeds[data.route] + result.backward_speed = profile.route_speeds[data.route] + end + -- railway platforms (old tagging scheme) + elseif data.railway and profile.platform_speeds[data.railway] then + result.forward_speed = profile.platform_speeds[data.railway] + result.backward_speed = profile.platform_speeds[data.railway] + data.way_type_allows_pushing = true + -- public_transport platforms (new tagging platform) + elseif data.public_transport and profile.platform_speeds[data.public_transport] then + result.forward_speed = profile.platform_speeds[data.public_transport] + result.backward_speed = profile.platform_speeds[data.public_transport] + data.way_type_allows_pushing = true + -- railways + elseif profile.use_public_transport and data.railway and profile.railway_speeds[data.railway] and profile.access_tag_whitelist[data.access] then + result.forward_mode = mode.train + result.backward_mode = mode.train + result.forward_speed = profile.railway_speeds[data.railway] + result.backward_speed = profile.railway_speeds[data.railway] + elseif data.amenity and profile.amenity_speeds[data.amenity] then + -- parking areas + result.forward_speed = profile.amenity_speeds[data.amenity] + result.backward_speed = profile.amenity_speeds[data.amenity] + data.way_type_allows_pushing = true + elseif profile.bicycle_speeds[data.highway] then + -- regular ways + result.forward_speed = profile.bicycle_speeds[data.highway] + result.backward_speed = profile.bicycle_speeds[data.highway] + data.way_type_allows_pushing = true + elseif data.access and profile.access_tag_whitelist[data.access] then + -- unknown way, but valid access tag + result.forward_speed = profile.default_speed + result.backward_speed = profile.default_speed + data.way_type_allows_pushing = true + end +end + +function oneway_handler(profile,way,result,data) + -- oneway + data.implied_oneway = data.junction == "roundabout" or data.junction == "circular" or data.highway == "motorway" + data.reverse = false + + if data.oneway_bicycle == "yes" or data.oneway_bicycle == "1" or data.oneway_bicycle == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway_bicycle == "no" or data.oneway_bicycle == "0" or data.oneway_bicycle == "false" then + -- prevent other cases + elseif data.oneway_bicycle == "-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.oneway == "yes" or data.oneway == "1" or data.oneway == "true" then + result.backward_mode = mode.inaccessible + elseif data.oneway == "no" or data.oneway == "0" or data.oneway == "false" then + -- prevent other cases + elseif data.oneway == 
"-1" then + result.forward_mode = mode.inaccessible + data.reverse = true + elseif data.implied_oneway then + result.backward_mode = mode.inaccessible + end +end + +function cycleway_handler(profile,way,result,data) + -- cycleway + data.has_cycleway_forward = false + data.has_cycleway_backward = false + data.is_twoway = result.forward_mode ~= mode.inaccessible and result.backward_mode ~= mode.inaccessible and not data.implied_oneway + + -- cycleways on normal roads + if data.is_twoway then + if data.cycleway and profile.cycleway_tags[data.cycleway] then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + end + if (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) then + data.has_cycleway_forward = true + end + if (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) then + data.has_cycleway_backward = true + end + else + local has_twoway_cycleway = (data.cycleway and profile.opposite_cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) + local has_opposite_cycleway = (data.cycleway_left and profile.opposite_cycleway_tags[data.cycleway_left]) or (data.cycleway_right and profile.opposite_cycleway_tags[data.cycleway_right]) + local has_oneway_cycleway = (data.cycleway and profile.cycleway_tags[data.cycleway]) or (data.cycleway_right and profile.cycleway_tags[data.cycleway_right]) or (data.cycleway_left and profile.cycleway_tags[data.cycleway_left]) + + -- set cycleway even though it is an one-way if opposite is tagged + if has_twoway_cycleway then + data.has_cycleway_backward = true + data.has_cycleway_forward = true + elseif has_opposite_cycleway then + if not data.reverse then + data.has_cycleway_backward = true + else + data.has_cycleway_forward = true + end + elseif has_oneway_cycleway then + if not data.reverse then + data.has_cycleway_forward = true + else + data.has_cycleway_backward = true + end + + end + end + + if data.has_cycleway_backward then + result.backward_mode = mode.cycling + result.backward_speed = profile.bicycle_speeds["cycleway"] + end + + if data.has_cycleway_forward then + result.forward_mode = mode.cycling + result.forward_speed = profile.bicycle_speeds["cycleway"] + end +end + +function bike_push_handler(profile,way,result,data) + -- pushing bikes - if no other mode found + if result.forward_mode == mode.inaccessible or result.backward_mode == mode.inaccessible or + result.forward_speed == -1 or result.backward_speed == -1 then + if data.foot ~= 'no' then + local push_forward_speed = nil + local push_backward_speed = nil + + if profile.pedestrian_speeds[data.highway] then + push_forward_speed = profile.pedestrian_speeds[data.highway] + push_backward_speed = profile.pedestrian_speeds[data.highway] + elseif data.man_made and profile.man_made_speeds[data.man_made] then + push_forward_speed = profile.man_made_speeds[data.man_made] + push_backward_speed = profile.man_made_speeds[data.man_made] + else + if data.foot == 'yes' then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + elseif data.foot_forward == 'yes' then + push_forward_speed = profile.walking_speed + elseif data.foot_backward == 'yes' then + push_backward_speed = profile.walking_speed 
+ elseif data.way_type_allows_pushing then + push_forward_speed = profile.walking_speed + if not data.implied_oneway then + push_backward_speed = profile.walking_speed + end + end + end + + if push_forward_speed and (result.forward_mode == mode.inaccessible or result.forward_speed == -1) then + result.forward_mode = mode.pushing_bike + result.forward_speed = push_forward_speed + end + if push_backward_speed and (result.backward_mode == mode.inaccessible or result.backward_speed == -1)then + result.backward_mode = mode.pushing_bike + result.backward_speed = push_backward_speed + end + + end + + end + + -- dismount + if data.bicycle == "dismount" then + result.forward_mode = mode.pushing_bike + result.backward_mode = mode.pushing_bike + result.forward_speed = profile.walking_speed + result.backward_speed = profile.walking_speed + end +end + +function safety_handler(profile,way,result,data) + -- convert duration into cyclability + if profile.properties.weight_name == 'cyclability' then + local safety_penalty = profile.unsafe_highway_list[data.highway] or 1. + local is_unsafe = safety_penalty < 1 + + -- primaries that are one ways are probably huge primaries where the lanes need to be separated + if is_unsafe and data.highway == 'primary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.5 + end + if is_unsafe and data.highway == 'secondary' and not data.is_twoway then + safety_penalty = safety_penalty * 0.6 + end + + local forward_is_unsafe = is_unsafe and not data.has_cycleway_forward + local backward_is_unsafe = is_unsafe and not data.has_cycleway_backward + local is_undesireable = data.highway == "service" and profile.service_penalties[data.service] + local forward_penalty = 1. + local backward_penalty = 1. + if forward_is_unsafe then + forward_penalty = math.min(forward_penalty, safety_penalty) + end + if backward_is_unsafe then + backward_penalty = math.min(backward_penalty, safety_penalty) + end + + if is_undesireable then + forward_penalty = math.min(forward_penalty, profile.service_penalties[data.service]) + backward_penalty = math.min(backward_penalty, profile.service_penalties[data.service]) + end + + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * forward_penalty + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * backward_penalty + end + if result.duration > 0 then + result.weight = result.duration / forward_penalty + end + + if data.highway == "bicycle" then + safety_bonus = safety_bonus + 0.2 + if result.forward_speed > 0 then + -- convert from km/h to m/s + result.forward_rate = result.forward_speed / 3.6 * safety_bonus + end + if result.backward_speed > 0 then + -- convert from km/h to m/s + result.backward_rate = result.backward_speed / 3.6 * safety_bonus + end + if result.duration > 0 then + result.weight = result.duration / safety_bonus + end + end + end +end + + + +function process_way(profile, way, result) + -- the initial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and initial tag check + -- is done directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. 
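+  -- Note (added for clarity): only 'highway' is prefetched in the data table
+  -- below; the other keys start as nil and are filled in by
+  -- handle_bicycle_tags(), which also aborts early (returns false) for ways
+  -- that clearly are not routable.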
+ + -- data table for storing intermediate values during processing + + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + + route = nil, + man_made = nil, + railway = nil, + amenity = nil, + public_transport = nil, + bridge = nil, + + access = nil, + + junction = nil, + maxspeed = nil, + maxspeed_forward = nil, + maxspeed_backward = nil, + barrier = nil, + oneway = nil, + oneway_bicycle = nil, + cycleway = nil, + cycleway_left = nil, + cycleway_right = nil, + duration = nil, + service = nil, + foot = nil, + foot_forward = nil, + foot_backward = nil, + bicycle = nil, + + way_type_allows_pushing = false, + has_cycleway_forward = false, + has_cycleway_backward = false, + is_twoway = true, + reverse = false, + implied_oneway = false + } + + local handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + + -- our main handler + handle_bicycle_tags, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.surface, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.classification, + + -- handle allowed start/end modes + WayHandlers.startpoint, + + -- handle roundabouts + WayHandlers.roundabouts, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set classes + WayHandlers.classes, + + -- set weight properties of the way + WayHandlers.weights + } + + WayHandlers.run(profile, way, result, data, handlers) +end + +function process_turn(profile, turn) + -- compute turn penalty as angle^2, with a left/right bias + local normalized_angle = turn.angle / 90.0 + if normalized_angle >= 0.0 then + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty / profile.turn_bias + else + turn.duration = normalized_angle * normalized_angle * profile.turn_penalty * profile.turn_bias + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + + if turn.has_traffic_light then + turn.duration = turn.duration + profile.properties.traffic_light_penalty + end + if profile.properties.weight_name == 'cyclability' then + turn.weight = turn.duration + end + if turn.source_mode == mode.cycling and turn.target_mode ~= mode.cycling then + turn.weight = turn.weight + profile.properties.mode_change_penalty + end +end + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/lib/access.lua b/admiral-router/vehicles/lib/access.lua new file mode 100644 index 0000000..678bd5c --- /dev/null +++ b/admiral-router/vehicles/lib/access.lua @@ -0,0 +1,15 @@ +local ipairs = ipairs + +local Access = {} + +function Access.find_access_tag(source,access_tags_hierarchy) + for i,v in ipairs(access_tags_hierarchy) do + local tag = source:get_value_by_key(v) + if tag then + return tag + end + end + return nil +end + +return Access diff --git a/admiral-router/vehicles/lib/destination.lua b/admiral-router/vehicles/lib/destination.lua new file mode 100644 index 0000000..a09330a --- /dev/null +++ b/admiral-router/vehicles/lib/destination.lua @@ -0,0 +1,29 @@ +local Destination = {} + +function Destination.get_directional_tag(way, is_forward, tag) + local v + if is_forward then + v = 
way:get_value_by_key(tag .. ':forward') or way:get_value_by_key(tag) + else + v = way:get_value_by_key(tag .. ':backward') or way:get_value_by_key(tag) + end + if v then + return v.gsub(v, ';', ', ') + end +end + +-- Assemble destination as: "A59: Düsseldorf, Köln" +-- destination:ref ^ ^ destination + +function Destination.get_destination(way, is_forward) + ref = Destination.get_directional_tag(way, is_forward, 'destination:ref') + dest = Destination.get_directional_tag(way, is_forward, 'destination') + street = Destination.get_directional_tag(way, is_forward, 'destination:street') + if ref and dest then + return ref .. ': ' .. dest + else + return ref or dest or street or '' + end +end + +return Destination diff --git a/admiral-router/vehicles/lib/guidance.lua b/admiral-router/vehicles/lib/guidance.lua new file mode 100644 index 0000000..04e87a9 --- /dev/null +++ b/admiral-router/vehicles/lib/guidance.lua @@ -0,0 +1,173 @@ +local Tags = require('lib/tags') +local Set = require('lib/set') + +local Guidance = {} + +-- Guidance: Default Mapping from roads to types/priorities +highway_classes = { + motorway = road_priority_class.motorway, + motorway_link = road_priority_class.motorway_link, + trunk = road_priority_class.trunk, + trunk_link = road_priority_class.trunk_link, + primary = road_priority_class.primary, + primary_link = road_priority_class.primary_link, + secondary = road_priority_class.secondary, + secondary_link = road_priority_class.secondary_link, + tertiary = road_priority_class.tertiary, + tertiary_link = road_priority_class.tertiary_link, + unclassified = road_priority_class.unclassified, + residential = road_priority_class.main_residential, + service = road_priority_class.alley, + living_street = road_priority_class.side_residential, + track = road_priority_class.bike_path, + path = road_priority_class.bike_path, + footway = road_priority_class.foot_path, + pedestrian = road_priority_class.foot_path, + steps = road_priority_class.foot_path +} + +default_highway_class = road_priority_class.connectivity; + +motorway_types = Set { + 'motorway', + 'motorway_link', + 'trunk', + 'trunk_link' +} + +-- these road types are set with a car in mind. 
For bicycle/walk we probably need different ones +road_types = Set { + 'motorway', + 'motorway_link', + 'trunk', + 'trunk_link', + 'primary', + 'primary_link', + 'secondary', + 'secondary_link', + 'tertiary', + 'tertiary_link', + 'unclassified', + 'residential', + 'living_street' +} + +link_types = Set { + 'motorway_link', + 'trunk_link', + 'primary_link', + 'secondary_link', + 'tertiary_link' +} + +-- roads like parking lots are very unimportant for normal driving +parking_class = Set{ + 'parking_aisle', + 'driveway', + 'drive-through', + 'emergency_access' +} + +function Guidance.set_classification (highway, result, input_way) + if motorway_types[highway] then + result.road_classification.motorway_class = true + end + if link_types[highway] then + result.road_classification.link_class = true + end + + -- All service roads are recognised as alley + if highway ~= nil and highway == 'service' then + local service_type = input_way:get_value_by_key('service'); + if service_type ~= nil and parking_class[service_type] then + result.road_classification.road_priority_class = road_priority_class.alley + else + if service_type ~= nil and service_type == 'alley' then + result.road_classification.road_priority_class = road_priority_class.alley + else + if service_type == nil then + result.road_classification.road_priority_class = road_priority_class.alley + else + result.road_classification.road_priority_class = highway_classes[highway] + end + end + end + else + if highway_classes[highway] ~= nil then + result.road_classification.road_priority_class = highway_classes[highway] + else + result.road_classification.road_priority_class = default_highway_class + end + end + if road_types[highway] then + result.road_classification.may_be_ignored = false; + else + result.road_classification.may_be_ignored = true; + end + + local lane_count = input_way:get_value_by_key("lanes") + if lane_count then + local lc = tonumber(lane_count) + if lc ~= nil then + result.road_classification.num_lanes = lc + end + else + local total_count = 0 + local forward_count = input_way:get_value_by_key("lanes:forward") + if forward_count then + local fc = tonumber(forward_count) + if fc ~= nil then + total_count = fc + end + end + local backward_count = input_way:get_value_by_key("lanes:backward") + if backward_count then + local bc = tonumber(backward_count) + if bc ~= nil then + total_count = total_count + bc + end + end + if total_count ~= 0 then + result.road_classification.num_lanes = total_count + end + end +end + +-- returns forward,backward psv lane count +local function get_psv_counts(way,data) + local psv_forward, psv_backward = Tags.get_forward_backward_by_key(way,data,'lanes:psv') + if psv_forward then + psv_forward = tonumber(psv_forward) + end + if psv_backward then + psv_backward = tonumber(psv_backward) + end + return psv_forward or 0, + psv_backward or 0 +end + +-- trims lane string with regard to supported lanes +local function process_lanes(turn_lanes,vehicle_lanes,first_count,second_count) + if turn_lanes then + if vehicle_lanes then + return applyAccessTokens(turn_lanes,vehicle_lanes) + elseif first_count ~= 0 or second_count ~= 0 then + return trimLaneString(turn_lanes, first_count, second_count) + else + return turn_lanes + end + end +end + +-- this is broken for left-sided driving. 
It needs to switch left and right in case of left-sided driving +function Guidance.get_turn_lanes(way,data) + local psv_fw, psv_bw = get_psv_counts(way,data) + local turn_lanes_fw, turn_lanes_bw = Tags.get_forward_backward_by_key(way,data,'turn:lanes') + local vehicle_lanes_fw, vehicle_lanes_bw = Tags.get_forward_backward_by_key(way,data,'vehicle:lanes') + + --note: backward lanes swap psv_bw and psv_fw + return process_lanes(turn_lanes_fw,vehicle_lanes_fw,psv_bw,psv_fw) or turn_lanes, + process_lanes(turn_lanes_bw,vehicle_lanes_bw,psv_fw,psv_bw) or turn_lanes +end + +return Guidance diff --git a/admiral-router/vehicles/lib/maxspeed.lua b/admiral-router/vehicles/lib/maxspeed.lua new file mode 100644 index 0000000..0dd9b82 --- /dev/null +++ b/admiral-router/vehicles/lib/maxspeed.lua @@ -0,0 +1,19 @@ +local math = math + +local MaxSpeed = {} + +function MaxSpeed.limit(way,max,maxf,maxb) + if maxf and maxf>0 then + way.forward_speed = math.min(way.forward_speed, maxf) + elseif max and max>0 then + way.forward_speed = math.min(way.forward_speed, max) + end + + if maxb and maxb>0 then + way.backward_speed = math.min(way.backward_speed, maxb) + elseif max and max>0 then + way.backward_speed = math.min(way.backward_speed, max) + end +end + +return MaxSpeed diff --git a/admiral-router/vehicles/lib/measure.lua b/admiral-router/vehicles/lib/measure.lua new file mode 100644 index 0000000..0583b33 --- /dev/null +++ b/admiral-router/vehicles/lib/measure.lua @@ -0,0 +1,107 @@ +local Sequence = require('lib/sequence') + +Measure = {} + +-- measurements conversion constants +local inch_to_meters = 0.0254 +local feet_to_inches = 12 +local pound_to_kilograms = 0.45359237 +local miles_to_kilometers = 1.609 + +-- Parse speed value as kilometers by hours. +function Measure.parse_value_speed(source) + local n = tonumber(source:match("%d*")) + if n then + if string.match(source, "mph") or string.match(source, "mp/h") then + n = n * miles_to_kilometers + end + return n + end +end + +--- Parse string as a height in meters. +--- according to http://wiki.openstreetmap.org/wiki/Key:maxheight +function Measure.parse_value_meters(value) + local n = tonumber(value:gsub(",", "."):match("%d+%.?%d*")) + if n then + inches = value:match("'.*") + if inches then -- Imperial unit to metric + -- try to parse feets/inch + n = n * feet_to_inches + local m = tonumber(inches:match("%d+")) + if m then + n = n + m + end + n = n * inch_to_meters + end + return n + end +end + +--- Parse weight value in kilograms. +--- according to https://wiki.openstreetmap.org/wiki/Key:maxweight +function Measure.parse_value_kilograms(value) + local n = tonumber(value:gsub(",", "."):match("%d+%.?%d*")) + if n then + if string.match(value, "lbs") then + n = n * pound_to_kilograms + elseif string.match(value, "kg") then + -- n = n + else -- Default, metric tons + n = n * 1000 + end + return n + end +end + +--- Get maxspeed of specified way in kilometers by hours. +function Measure.get_max_speed(raw_value) + if raw_value then + return Measure.parse_value_speed(raw_value) + end +end + +-- default maxheight value defined in https://wiki.openstreetmap.org/wiki/Key:maxheight#Non-numerical_values +local default_maxheight = 4.5 +-- Available Non numerical values equal to 4.5; below_default and no_indications are not considered +local height_non_numerical_values = Set { "default", "none", "no-sign", "unsigned" } + +--- Get maxheight of specified way in meters. 
If there are no +--- max height, then return nil +function Measure.get_max_height(raw_value, element) + if raw_value then + if height_non_numerical_values[raw_value] then + if element then + return tonumber(element:get_location_tag('maxheight')) or default_maxheight + else + return default_maxheight + end + else + return Measure.parse_value_meters(raw_value) + end + end +end + +--- Get maxwidth of specified way in meters. +function Measure.get_max_width(raw_value) + if raw_value then + return Measure.parse_value_meters(raw_value) + end +end + +--- Get maxlength of specified way in meters. +function Measure.get_max_length(raw_value) + if raw_value then + return Measure.parse_value_meters(raw_value) + end +end + +--- Get maxweight of specified way in kilogramms. +function Measure.get_max_weight(raw_value) + if raw_value then + return Measure.parse_value_kilograms(raw_value) + end +end + + +return Measure; diff --git a/admiral-router/vehicles/lib/pprint.lua b/admiral-router/vehicles/lib/pprint.lua new file mode 100644 index 0000000..38e9db1 --- /dev/null +++ b/admiral-router/vehicles/lib/pprint.lua @@ -0,0 +1,457 @@ +-- Easy way to print data structes +-- From https://github.com/jagt/pprint.lua, file is license as pubic domain + +local pprint = { VERSION = '0.1' } + +pprint.defaults = { + -- type display trigger, hide not useful datatypes by default + -- custom types are treated as table + show_nil = true, + show_boolean = true, + show_number = true, + show_string = true, + show_table = true, + show_function = false, + show_thread = false, + show_userdata = false, + -- additional display trigger + show_metatable = false, -- show metatable + show_all = false, -- override other show settings and show everything + use_tostring = false, -- use __tostring to print table if available + filter_function = nil, -- called like callback(value[,key, parent]), return truty value to hide + object_cache = 'local', -- cache blob and table to give it a id, 'local' cache per print, 'global' cache + -- per process, falsy value to disable (might cause infinite loop) + -- format settings + indent_size = 2, -- indent for each nested table level + level_width = 80, -- max width per indent level + wrap_string = true, -- wrap string when it's longer than level_width + wrap_array = false, -- wrap every array elements + sort_keys = true, -- sort table keys +} + +local TYPES = { + ['nil'] = 1, ['boolean'] = 2, ['number'] = 3, ['string'] = 4, + ['table'] = 5, ['function'] = 6, ['thread'] = 7, ['userdata'] = 8 +} + +-- seems this is the only way to escape these, as lua don't know how to map char '\a' to 'a' +local ESCAPE_MAP = { + ['\a'] = '\\a', ['\b'] = '\\b', ['\f'] = '\\f', ['\n'] = '\\n', ['\r'] = '\\r', + ['\t'] = '\\t', ['\v'] = '\\v', ['\\'] = '\\\\', +} + +-- generic utilities +local function escape(s) + s = s:gsub('([%c\\])', ESCAPE_MAP) + local dq = s:find('"') + local sq = s:find("'") + if dq and sq then + return s:gsub('"', '\\"'), '"' + elseif sq then + return s, '"' + else + return s, "'" + end +end + +local function is_plain_key(key) + return type(key) == 'string' and key:match('^[%a_][%a%d_]*$') +end + +local CACHE_TYPES = { + ['table'] = true, ['function'] = true, ['thread'] = true, ['userdata'] = true +} + +-- cache would be populated to be like: +-- { +-- function = { `fun1` = 1, _cnt = 1 }, -- object id +-- table = { `table1` = 1, `table2` = 2, _cnt = 2 }, +-- visited_tables = { `table1` = 7, `table2` = 8 }, -- visit count +-- } +-- use weakrefs to avoid accidentall adding refcount +local function 
cache_apperance(obj, cache, option) + if not cache.visited_tables then + cache.visited_tables = setmetatable({}, {__mode = 'k'}) + end + local t = type(obj) + + -- TODO can't test filter_function here as we don't have the ix and key, + -- might cause different results? + -- respect show_xxx and filter_function to be consistent with print results + if (not TYPES[t] and not option.show_table) + or (TYPES[t] and not option['show_'..t]) then + return + end + + if CACHE_TYPES[t] or TYPES[t] == nil then + if not cache[t] then + cache[t] = setmetatable({}, {__mode = 'k'}) + cache[t]._cnt = 0 + end + if not cache[t][obj] then + cache[t]._cnt = cache[t]._cnt + 1 + cache[t][obj] = cache[t]._cnt + end + end + if t == 'table' or TYPES[t] == nil then + if cache.visited_tables[obj] == false then + -- already printed, no need to mark this and its children anymore + return + elseif cache.visited_tables[obj] == nil then + cache.visited_tables[obj] = 1 + else + -- visited already, increment and continue + cache.visited_tables[obj] = cache.visited_tables[obj] + 1 + return + end + for k, v in pairs(obj) do + cache_apperance(k, cache, option) + cache_apperance(v, cache, option) + end + local mt = getmetatable(obj) + if mt and option.show_metatable then + cache_apperance(mt, cache, option) + end + end +end + +-- makes 'foo2' < 'foo100000'. string.sub makes substring anyway, no need to use index based method +local function str_natural_cmp(lhs, rhs) + while #lhs > 0 and #rhs > 0 do + local lmid, lend = lhs:find('%d+') + local rmid, rend = rhs:find('%d+') + if not (lmid and rmid) then return lhs < rhs end + + local lsub = lhs:sub(1, lmid-1) + local rsub = rhs:sub(1, rmid-1) + if lsub ~= rsub then + return lsub < rsub + end + + local lnum = tonumber(lhs:sub(lmid, lend)) + local rnum = tonumber(rhs:sub(rmid, rend)) + if lnum ~= rnum then + return lnum < rnum + end + + lhs = lhs:sub(lend+1) + rhs = rhs:sub(rend+1) + end + return lhs < rhs +end + +local function cmp(lhs, rhs) + local tleft = type(lhs) + local tright = type(rhs) + if tleft == 'number' and tright == 'number' then return lhs < rhs end + if tleft == 'string' and tright == 'string' then return str_natural_cmp(lhs, rhs) end + if tleft == tright then return str_natural_cmp(tostring(lhs), tostring(rhs)) end + + -- allow custom types + local oleft = TYPES[tleft] or 9 + local oright = TYPES[tright] or 9 + return oleft < oright +end + +-- setup option with default +local function make_option(option) + if option == nil then + option = {} + end + for k, v in pairs(pprint.defaults) do + if option[k] == nil then + option[k] = v + end + if option.show_all then + for t, _ in pairs(TYPES) do + option['show_'..t] = true + end + option.show_metatable = true + end + end + return option +end + +-- override defaults and take effects for all following calls +function pprint.setup(option) + pprint.defaults = make_option(option) +end + +-- format lua object into a string +function pprint.pformat(obj, option, printer) + option = make_option(option) + local buf = {} + local function default_printer(s) + table.insert(buf, s) + end + printer = printer or default_printer + + local cache + if option.object_cache == 'global' then + -- steal the cache into a local var so it's not visible from _G or anywhere + -- still can't avoid user explicitly referentce pprint._cache but it shouldn't happen anyway + cache = pprint._cache or {} + pprint._cache = nil + elseif option.object_cache == 'local' then + cache = {} + end + + local last = '' -- used for look back and remove trailing comma + 
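+  -- status tracks the current indent string and the length of the line being
+  -- built; the local helpers below (_indent/_n/_p) consult it to decide when
+  -- to wrap output at option.level_width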
local status = { + indent = '', -- current indent + len = 0, -- current line length + } + + local wrapped_printer = function(s) + printer(last) + last = s + end + + local function _indent(d) + status.indent = string.rep(' ', d + #(status.indent)) + end + + local function _n(d) + wrapped_printer('\n') + wrapped_printer(status.indent) + if d then + _indent(d) + end + status.len = 0 + return true -- used to close bracket correctly + end + + local function _p(s, nowrap) + status.len = status.len + #s + if not nowrap and status.len > option.level_width then + _n() + wrapped_printer(s) + status.len = #s + else + wrapped_printer(s) + end + end + + local formatter = {} + local function format(v) + local f = formatter[type(v)] + f = f or formatter.table -- allow patched type() + if option.filter_function and option.filter_function(v, nil, nil) then + return '' + else + return f(v) + end + end + + local function tostring_formatter(v) + return tostring(v) + end + + local function number_formatter(n) + return n == math.huge and '[[math.huge]]' or tostring(n) + end + + local function nop_formatter(v) + return '' + end + + local function make_fixed_formatter(t, has_cache) + if has_cache then + return function (v) + return string.format('[[%s %d]]', t, cache[t][v]) + end + else + return function (v) + return '[['..t..']]' + end + end + end + + local function string_formatter(s, force_long_quote) + local s, quote = escape(s) + local quote_len = force_long_quote and 4 or 2 + if quote_len + #s + status.len > option.level_width then + _n() + -- only wrap string when is longer than level_width + if option.wrap_string and #s + quote_len > option.level_width then + -- keep the quotes together + _p('[[') + while #s + status.len >= option.level_width do + local seg = option.level_width - status.len + _p(string.sub(s, 1, seg), true) + _n() + s = string.sub(s, seg+1) + end + _p(s) -- print the remaining parts + return ']]' + end + end + + return force_long_quote and '[['..s..']]' or quote..s..quote + end + + local function table_formatter(t) + if option.use_tostring then + local mt = getmetatable(t) + if mt and mt.__tostring then + return string_formatter(tostring(t), true) + end + end + + local print_header_ix = nil + local ttype = type(t) + if option.object_cache then + local cache_state = cache.visited_tables[t] + local tix = cache[ttype][t] + -- FIXME should really handle `cache_state == nil` + -- as user might add things through filter_function + if cache_state == false then + -- already printed, just print the the number + return string_formatter(string.format('%s %d', ttype, tix), true) + elseif cache_state > 1 then + -- appeared more than once, print table header with number + print_header_ix = tix + cache.visited_tables[t] = false + else + -- appeared exactly once, print like a normal table + end + end + + local tlen = #t + local wrapped = false + _p('{') + _indent(option.indent_size) + _p(string.rep(' ', option.indent_size - 1)) + if print_header_ix then + _p(string.format('--[[%s %d]] ', ttype, print_header_ix)) + end + for ix = 1,tlen do + local v = t[ix] + if formatter[type(v)] == nop_formatter or + (option.filter_function and option.filter_function(v, ix, t)) then + -- pass + else + if option.wrap_array then + wrapped = _n() + end + _p(format(v)..', ') + end + end + + -- hashmap part of the table, in contrast to array part + local function is_hash_key(k) + local numkey = tonumber(k) + if numkey ~= k or numkey > tlen then + return true + end + end + + local function print_kv(k, v, t) + -- can't use 
option.show_x as obj may contain custom type + if formatter[type(v)] == nop_formatter or + formatter[type(k)] == nop_formatter or + (option.filter_function and option.filter_function(v, k, t)) then + return + end + wrapped = _n() + if is_plain_key(k) then + _p(k, true) + else + _p('[') + -- [[]] type string in key is illegal, needs to add spaces inbetween + local k = format(k) + if string.match(k, '%[%[') then + _p(' '..k..' ', true) + else + _p(k, true) + end + _p(']') + end + _p(' = ', true) + _p(format(v), true) + _p(',', true) + end + + if option.sort_keys then + local keys = {} + for k, _ in pairs(t) do + if is_hash_key(k) then + table.insert(keys, k) + end + end + table.sort(keys, cmp) + for _, k in ipairs(keys) do + print_kv(k, t[k], t) + end + else + for k, v in pairs(t) do + if is_hash_key(k) then + print_kv(k, v, t) + end + end + end + + if option.show_metatable then + local mt = getmetatable(t) + if mt then + print_kv('__metatable', mt, t) + end + end + + _indent(-option.indent_size) + -- make { } into {} + last = string.gsub(last, '^ +$', '') + -- peek last to remove trailing comma + last = string.gsub(last, ',%s*$', ' ') + if wrapped then + _n() + end + _p('}') + + return '' + end + + -- set formatters + formatter['nil'] = option.show_nil and tostring_formatter or nop_formatter + formatter['boolean'] = option.show_boolean and tostring_formatter or nop_formatter + formatter['number'] = option.show_number and number_formatter or nop_formatter -- need to handle math.huge + formatter['function'] = option.show_function and make_fixed_formatter('function', option.object_cache) or nop_formatter + formatter['thread'] = option.show_thread and make_fixed_formatter('thread', option.object_cache) or nop_formatter + formatter['userdata'] = option.show_userdata and make_fixed_formatter('userdata', option.object_cache) or nop_formatter + formatter['string'] = option.show_string and string_formatter or nop_formatter + formatter['table'] = option.show_table and table_formatter or nop_formatter + + if option.object_cache then + -- needs to visit the table before start printing + cache_apperance(obj, cache, option) + end + + _p(format(obj)) + printer(last) -- close the buffered one + + -- put cache back if global + if option.object_cache == 'global' then + pprint._cache = cache + end + + return table.concat(buf) +end + +-- pprint all the arguments +function pprint.pprint( ... ) + local args = {...} + -- select will get an accurate count of array len, counting trailing nils + local len = select('#', ...) + for ix = 1,len do + pprint.pformat(args[ix], nil, io.write) + io.write('\n') + end +end + +setmetatable(pprint, { + __call = function (_, ...) + pprint.pprint(...) + end +}) + +return pprint diff --git a/admiral-router/vehicles/lib/profile_debugger.lua b/admiral-router/vehicles/lib/profile_debugger.lua new file mode 100644 index 0000000..2768501 --- /dev/null +++ b/admiral-router/vehicles/lib/profile_debugger.lua @@ -0,0 +1,142 @@ +-- Enable calling our lua profile code directly from the lua command line, +-- which makes it easier to debug. +-- We simulate the normal C++ environment by defining the required globals and functions. + +-- See debug_example.lua for an example of how to require and use this file. 
+ +-- for more convenient printing of tables +local pprint = require('lib/pprint') + + +-- globals that are normally set from C++ + +-- should match values defined in include/extractor/road_classification.hpp +road_priority_class = { + motorway = 0, + trunk = 2, + primary = 4, + secondary = 6, + tertiary = 8, + main_residential = 10, + side_residential = 11, + link_road = 14, + bike_path = 16, + foot_path = 18, + connectivity = 31, +} + +-- should match values defined in include/extractor/travel_mode.hpp +mode = { + inaccessible = 0, + driving = 1, + cycling = 2, + walking = 3, + ferry = 4, + train = 5, + pushing_bike = 6, +} + +-- Mock C++ helper functions which are called from LUA. +-- TODO +-- Debugging LUA code that uses these will not work correctly +-- unless we reimplement the methods in LUA. + +function durationIsValid(str) + return true +end + +function parseDuration(str) + return 1 +end + +function canonicalizeStringList(str) + return str +end + + + +-- debug helper +local Debug = {} + +-- helpers for sorting associative array +function Debug.get_keys_sorted_by_value(tbl, sortFunction) + local keys = {} + for key in pairs(tbl) do + table.insert(keys, key) + end + + table.sort(keys, function(a, b) + return sortFunction(tbl[a], tbl[b]) + end) + + return keys +end + +-- helper for printing sorted array +function Debug.print_sorted(sorted,associative) + for _, key in ipairs(sorted) do + print(associative[key], key) + end +end + +function Debug.report_tag_fetches() + print("Tag fetches:") + sorted_counts = Debug.get_keys_sorted_by_value(Debug.tags.counts, function(a, b) return a > b end) + Debug.print_sorted(sorted_counts, Debug.tags.counts) + print(Debug.tags.total, 'total') +end + +function Debug.load_profile(profile) + Debug.functions = require(profile) + Debug.profile = Debug.functions.setup() +end + +function Debug.reset_tag_fetch_counts() + Debug.tags = { + total = 0, + counts = {} + } +end + +function Debug.register_tag_fetch(k) + if Debug.tags.total then + Debug.tags.total = Debug.tags.total + 1 + else + Debug['tags']['total'] = 1 + end + + if Debug['tags']['counts'][k] then + Debug['tags']['counts'][k] = Debug['tags']['counts'][k] + 1 + else + Debug['tags']['counts'][k] = 1 + end + +end + +function Debug.process_way(way,result) + + -- setup result table + result.road_classification = {} + result.forward_speed = -1 + result.backward_speed = -1 + result.duration = 0 + result.forward_classes = {} + result.backward_classes = {} + + -- intercept tag functions normally provided via C++ + function way:get_value_by_key(k) + Debug.register_tag_fetch(k) + return self[k] + end + function way:get_location_tag(k) + return nil + end + + -- reset tag counts + Debug:reset_tag_fetch_counts() + + -- call the way processsing function + Debug.functions.process_way(Debug.profile,way,result) +end + +return Debug diff --git a/admiral-router/vehicles/lib/relations.lua b/admiral-router/vehicles/lib/relations.lua new file mode 100644 index 0000000..f061b57 --- /dev/null +++ b/admiral-router/vehicles/lib/relations.lua @@ -0,0 +1,261 @@ +-- Profile functions dealing with various aspects of relation parsing +-- +-- You can run a selection you find useful in your profile, +-- or do you own processing if/when required. 
+ +Utils = require('lib/utils') + +Relations = {} + +function is_direction(role) + return (role == 'north' or role == 'south' or role == 'west' or role == 'east') +end + +-- match ref values to relations data +function Relations.match_to_ref(relations, ref) + + function calculate_scores(refs, tag_value) + local tag_tokens = Set(Utils.tokenize_common(tag_value)) + local result = {} + for i, r in ipairs(refs) do + local ref_tokens = Utils.tokenize_common(r) + local score = 0 + + for _, t in ipairs(ref_tokens) do + if tag_tokens[t] then + if Utils.is_number(t) then + score = score + 2 + else + score = score + 1 + end + end + end + + result[r] = score + end + + return result + end + + local references = Utils.string_list_tokens(ref) + local result_match = {} + local order = {} + for i, r in ipairs(references) do + result_match[r] = { forward = nil, backward = nil } + order[i] = r + end + + for i, rel in ipairs(relations) do + local name_scores = nil + local name_tokens = {} + local route_name = rel["route_name"] + if route_name then + name_scores = calculate_scores(references, route_name) + end + + local ref_scores = nil + local ref_tokens = {} + local route_ref = rel["route_ref"] + if route_ref then + ref_scores = calculate_scores(references, route_ref) + end + + -- merge scores + local direction = rel["route_direction"] + if direction then + local best_score = -1 + local best_ref = nil + + function find_best(scores) + if scores then + for k ,v in pairs(scores) do + if v > best_score then + best_ref = k + best_score = v + end + end + end + end + + find_best(name_scores) + find_best(ref_scores) + + if best_ref then + local result_direction = result_match[best_ref] + + local is_forward = rel["route_forward"] + if is_forward == nil then + result_direction.forward = direction + result_direction.backward = direction + elseif is_forward == true then + result_direction.forward = direction + else + result_direction.backward = direction + end + + result_match[best_ref] = result_direction + end + end + + end + + local result = {} + for i, r in ipairs(order) do + result[i] = { ref = r, dir = result_match[r] }; + end + + return result +end + +function get_direction_from_superrel(rel, relations) + local result = nil + local result_id = nil + local rel_id_list = relations:get_relations(rel) + + function set_result(direction, current_rel) + if (result ~= nil) and (direction ~= nil) then + print('WARNING: relation ' .. rel:id() .. ' is a part of more then one supperrelations ' .. result_id .. ' and ' .. 
current_rel:id()) + result = nil + else + result = direction + result_id = current_rel:id() + end + end + + for i, rel_id in ipairs(rel_id_list) do + local parent_rel = relations:relation(rel_id) + if parent_rel:get_value_by_key('type') == 'route' then + local role = parent_rel:get_role(rel) + + if is_direction(role) then + set_result(role, parent_rel) + else + local dir = parent_rel:get_value_by_key('direction') + if is_direction(dir) then + set_result(dir, parent_rel) + end + end + end + -- TODO: support forward/backward + end + + return result +end + +function Relations.parse_route_relation(rel, way, relations) + local t = rel:get_value_by_key("type") + local role = rel:get_role(way) + local result = {} + + function add_extra_data(m) + local name = rel:get_value_by_key("name") + if name then + result['route_name'] = name + end + + local ref = rel:get_value_by_key("ref") + if ref then + result['route_ref'] = ref + end + end + + if t == 'route' then + local role_direction = nil + local route = rel:get_value_by_key("route") + if route == 'road' then + -- process case, where directions set as role + if is_direction(role) then + role_direction = role + end + end + + local tag_direction = nil + local direction = rel:get_value_by_key('direction') + if direction then + direction = string.lower(direction) + if is_direction(direction) then + tag_direction = direction + end + end + + -- determine direction + local result_direction = role_direction + if result_direction == nil and tag_direction ~= '' then + result_direction = tag_direction + end + + if role_direction ~= nil and tag_direction ~= nil and role_direction ~= tag_direction then + result_direction = nil + print('WARNING: conflict direction in role of way ' .. way:id() .. ' and direction tag in relation ' .. rel:id()) + end + + + -- process superrelations + local super_dir = get_direction_from_superrel(rel, relations) + + -- check if there are data error + if (result_direction ~= nil) and (super_dir ~= nil) and (result_direction ~= super_dir) then + print('ERROR: conflicting relation directions found for way ' .. way:id() .. + ' relation direction is ' .. result_direction .. ' superrelation direction is ' .. super_dir) + result_direction = nil + elseif result_direction == nil then + result_direction = super_dir + end + + result['route_direction'] = result_direction + + if role == 'forward' then + result['route_forward'] = true + elseif role == 'backward' then + result['route_forward'] = false + else + result['route_forward'] = nil + end + + add_extra_data(m) + end + + return result +end + +function Relations.process_way_refs(way, relations, result) + local parsed_rel_list = {} + local rel_id_list = relations:get_relations(way) + for i, rel_id in ipairs(rel_id_list) do + local rel = relations:relation(rel_id) + parsed_rel_list[i] = Relations.parse_route_relation(rel, way, relations) + end + + -- now process relations data + local matched_refs = nil; + if result.ref then + local match_res = Relations.match_to_ref(parsed_rel_list, result.ref) + + function gen_ref(is_forward) + local ref = '' + for _, m in pairs(match_res) do + if ref ~= '' then + ref = ref .. '; ' + end + + local dir = m.dir.forward + if is_forward == false then + dir = m.dir.backward + end + + if dir then + ref = ref .. m.ref .. ' $' .. dir + else + ref = ref .. 
m.ref + end + end + + return ref + end + + result.forward_ref = gen_ref(true) + result.backward_ref = gen_ref(false) + end +end + +return Relations diff --git a/admiral-router/vehicles/lib/sequence.lua b/admiral-router/vehicles/lib/sequence.lua new file mode 100644 index 0000000..9cac788 --- /dev/null +++ b/admiral-router/vehicles/lib/sequence.lua @@ -0,0 +1,10 @@ +-- Sequence of items +-- Ordered, but have to loop through items to check for inclusion. +-- Currently the same as a table. +-- Adds the convenience function append() to append to the sequnce. + +function Sequence(source) + return source +end + +return Sequence \ No newline at end of file diff --git a/admiral-router/vehicles/lib/set.lua b/admiral-router/vehicles/lib/set.lua new file mode 100644 index 0000000..bbd9719 --- /dev/null +++ b/admiral-router/vehicles/lib/set.lua @@ -0,0 +1,23 @@ +-- Set of items +-- Fast check for inclusion, but unordered. +-- +-- Instead of having to do: +-- whitelist = { 'apple'=true, 'cherries'=true, 'melons'=true } +-- +-- you can do: +-- whitelist = Set { 'apple', 'cherries', 'melons' } +-- +-- and then use it as: +-- print( whitelist['cherries'] ) => true + +function Set(source) + set = {} + if source then + for i,v in ipairs(source) do + set[v] = true + end + end + return set +end + +return Set \ No newline at end of file diff --git a/admiral-router/vehicles/lib/tags.lua b/admiral-router/vehicles/lib/tags.lua new file mode 100644 index 0000000..c88afff --- /dev/null +++ b/admiral-router/vehicles/lib/tags.lua @@ -0,0 +1,131 @@ +-- Helpers for searching and parsing tags + +local Tags = {} + +-- return [forward,backward] values for a specific tag. +-- e.g. for maxspeed search forward: +-- maxspeed:forward +-- maxspeed +-- and backward: +-- maxspeed:backward +-- maxspeed + +function Tags.get_forward_backward_by_key(way,data,key) + local forward = way:get_value_by_key(key .. ':forward') + local backward = way:get_value_by_key(key .. ':backward') + + if not forward or not backward then + local common = way:get_value_by_key(key) + + if data.is_forward_oneway then + forward = forward or common + elseif data.is_reverse_oneway then + backward = backward or common + else + forward = forward or common + backward = backward or common + end + end + + return forward, backward +end + +-- return [forward,backward] values, searching a +-- prioritized sequence of tags +-- e.g. for the sequence [maxspeed,advisory] search forward: +-- maxspeed:forward +-- maxspeed +-- advisory:forward +-- advisory +-- and for backward: +-- maxspeed:backward +-- maxspeed +-- advisory:backward +-- advisory + +function Tags.get_forward_backward_by_set(way,data,keys) + local forward, backward + for i,key in ipairs(keys) do + if not forward then + forward = way:get_value_by_key(key .. ':forward') + end + if not backward then + backward = way:get_value_by_key(key .. ':backward') + end + if not forward or not backward then + local common = way:get_value_by_key(key) + forward = forward or common + backward = backward or common + end + if forward and backward then + break + end + end + + return forward, backward +end + +-- look through a sequence of keys combined with a prefix +-- e.g. for the sequence [motorcar,motor_vehicle,vehicle] and the prefix 'oneway' search for: +-- oneway:motorcar +-- oneway:motor_vehicle +-- oneway:vehicle + +function Tags.get_value_by_prefixed_sequence(way,seq,prefix) + local v + for i,key in ipairs(seq) do + v = way:get_value_by_key(prefix .. ':' .. 
key) + if v then + return v + end + end +end + +-- look through a sequence of keys combined with a postfix +-- e.g. for the sequence [motorcar,motor_vehicle,vehicle] and the postfix 'oneway' search for: +-- motorcar:oneway +-- motor_vehicle:oneway +-- vehicle:oneway + +function Tags.get_value_by_postfixed_sequence(way,seq,postfix) + local v + for i,key in ipairs(seq) do + v = way:get_value_by_key(key .. ':' .. postfix) + if v then + return v + end + end +end + +-- check if key-value pairs are set in a way and return a +-- corresponding constant if it is. e.g. for this input: +-- +-- local speeds = { +-- highway = { +-- residential = 20, +-- primary = 40 +-- }, +-- amenity = { +-- parking = 10 +-- } +-- } +-- +-- we would check whether the following key-value combinations +-- are set, and return the corresponding constant: +-- +-- highway = residential => 20 +-- highway = primary => 40 +-- amenity = parking => 10 + +function Tags.get_constant_by_key_value(way,lookup) + for key,set in pairs(lookup) do + local way_value = way:get_value_by_key(key) + for value,t in pairs(set) do + if way_value == value then + return key,value,t + end + end + end +end + +return Tags diff --git a/admiral-router/vehicles/lib/traffic_signal.lua b/admiral-router/vehicles/lib/traffic_signal.lua new file mode 100644 index 0000000..8356e35 --- /dev/null +++ b/admiral-router/vehicles/lib/traffic_signal.lua @@ -0,0 +1,26 @@ +-- Assigns traffic light value to node as defined by +-- include/extractor/traffic_lights.hpp + +local TrafficSignal = {} + +function TrafficSignal.get_value(node) + local tag = node:get_value_by_key("highway") + if "traffic_signals" == tag then + local direction = node:get_value_by_key("traffic_signals:direction") + if direction then + if "forward" == direction then + return traffic_lights.direction_forward + end + if "backward" == direction then + return traffic_lights.direction_reverse + end + end + -- return traffic_lights.direction_all + return true + end + -- return traffic_lights.none + return false +end + +return TrafficSignal + diff --git a/admiral-router/vehicles/lib/utils.lua b/admiral-router/vehicles/lib/utils.lua new file mode 100644 index 0000000..b349f34 --- /dev/null +++ b/admiral-router/vehicles/lib/utils.lua @@ -0,0 +1,43 @@ +-- Profile functions to implement common algorithms of data processing +-- +-- You can run a selection you find useful in your profile, +-- or do you own processing if/when required. 
+ +Utils = {} + +-- split string 'a; b; c' to table with values ['a', 'b', 'c'] +-- so it use just one separator ';' +function Utils.string_list_tokens(str) + result = {} + local idx = 0 + for s in str.gmatch(str, "([^;]*)") do + if s ~= nil and s ~= '' then + idx = idx + 1 + result[idx] = s:gsub("^%s*(.-)%s*$", "%1") + end + end + + return result +end + +-- same as Utils.StringListTokens, but with many possible separators: +-- ',' | ';' | ' '| '(' | ')' +function Utils.tokenize_common(str) + result = {} + local idx = 0 + for s in str.gmatch(str, "%S+") do + if s ~= nil and s ~= '' then + idx = idx + 1 + result[idx] = s:gsub("^%s*(.-)%s*$", "%1") + end + end + + return result +end + +-- returns true, if string contains a number +function Utils.is_number(str) + return (tonumber(str) ~= nil) +end + +return Utils \ No newline at end of file diff --git a/admiral-router/vehicles/lib/way_handlers.lua b/admiral-router/vehicles/lib/way_handlers.lua new file mode 100644 index 0000000..b134102 --- /dev/null +++ b/admiral-router/vehicles/lib/way_handlers.lua @@ -0,0 +1,717 @@ +-- Profile handlers dealing with various aspects of tag parsing +-- +-- You can run a selection you find useful in your profile, +-- or do you own processing if/when required. + + +local get_turn_lanes = require("lib/guidance").get_turn_lanes +local set_classification = require("lib/guidance").set_classification +local get_destination = require("lib/destination").get_destination +local Tags = require('lib/tags') +local Measure = require("lib/measure") + +WayHandlers = {} + +-- check that way has at least one tag that could imply routability- +-- we store the checked tags in data, to avoid fetching again later +function WayHandlers.tag_prefetch(profile,way,result,data) + for key,v in pairs(profile.prefetch) do + data[key] = way:get_value_by_key( key ) + end + + return next(data) ~= nil +end + +-- set default mode +function WayHandlers.default_mode(profile,way,result,data) + result.forward_mode = profile.default_mode + result.backward_mode = profile.default_mode +end + +-- handles name, including ref and pronunciation +function WayHandlers.names(profile,way,result,data) + -- parse the remaining tags + local name = way:get_value_by_key("name") + local pronunciation = way:get_value_by_key("name:pronunciation") + local ref = way:get_value_by_key("ref") + local exits = way:get_value_by_key("junction:ref") + + -- Set the name that will be used for instructions + if name then + result.name = name + end + + if ref then + result.ref = canonicalizeStringList(ref, ";") + end + + if pronunciation then + result.pronunciation = pronunciation + end + + if exits then + result.exits = canonicalizeStringList(exits, ";") + end +end + +-- junctions +function WayHandlers.roundabouts(profile,way,result,data) + local junction = way:get_value_by_key("junction"); + + if junction == "roundabout" then + result.roundabout = true + end + + -- See Issue 3361: roundabout-shaped not following roundabout rules. + -- This will get us "At Strausberger Platz do Maneuver X" instead of multiple quick turns. + -- In a new API version we can think of having a separate type passing it through to the user. 
+ if junction == "circular" then + result.circular = true + end +end + +-- determine if this way can be used as a start/end point for routing +function WayHandlers.startpoint(profile,way,result,data) + -- if profile specifies set of allowed start modes, then check for that + -- otherwise require default mode + if profile.allowed_start_modes then + result.is_startpoint = profile.allowed_start_modes[result.forward_mode] == true or + profile.allowed_start_modes[result.backward_mode] == true + else + result.is_startpoint = result.forward_mode == profile.default_mode or + result.backward_mode == profile.default_mode + end + -- highway=service and access tags check + local is_service = data.highway == "service" + if is_service then + if profile.service_access_tag_blacklist[data.forward_access] then + result.is_startpoint = false + end + end +end + +-- handle turn lanes +function WayHandlers.turn_lanes(profile,way,result,data) + local forward, backward = get_turn_lanes(way,data) + + if forward then + result.turn_lanes_forward = forward + end + + if backward then + result.turn_lanes_backward = backward + end +end + +-- set the road classification based on guidance globals configuration +function WayHandlers.classification(profile,way,result,data) + set_classification(data.highway,result,way) +end + +-- handle destination tags +function WayHandlers.destinations(profile,way,result,data) + if data.is_forward_oneway or data.is_reverse_oneway then + local destination = get_destination(way, data.is_forward_oneway) + result.destinations = canonicalizeStringList(destination, ",") + end +end + +-- handling ferries and piers +function WayHandlers.ferries(profile,way,result,data) + local route = data.route + if route then + local route_speed = profile.route_speeds[route] + if route_speed and route_speed > 0 then + local duration = way:get_value_by_key("duration") + if duration and durationIsValid(duration) then + result.duration = math.max( parseDuration(duration), 1 ) + end + result.forward_mode = mode.ferry + result.backward_mode = mode.ferry + result.forward_speed = route_speed + result.backward_speed = route_speed + end + end +end + +-- handling movable bridges +function WayHandlers.movables(profile,way,result,data) + local bridge = data.bridge + if bridge then + local bridge_speed = profile.bridge_speeds[bridge] + if bridge_speed and bridge_speed > 0 then + local capacity_car = way:get_value_by_key("capacity:car") + if capacity_car ~= 0 then + result.forward_mode = profile.default_mode + result.backward_mode = profile.default_mode + local duration = way:get_value_by_key("duration") + if duration and durationIsValid(duration) then + result.duration = math.max( parseDuration(duration), 1 ) + else + result.forward_speed = bridge_speed + result.backward_speed = bridge_speed + end + end + end + end +end + +-- service roads +function WayHandlers.service(profile,way,result,data) + local service = way:get_value_by_key("service") + if service then + -- Set don't allow access to certain service roads + if profile.service_tag_forbidden[service] then + result.forward_mode = mode.inaccessible + result.backward_mode = mode.inaccessible + return false + end + end +end + +-- all lanes restricted to hov vehicles? +function WayHandlers.has_all_designated_hov_lanes(lanes) + if not lanes then + return false + end + -- This gmatch call effectively splits the string on | chars. + -- we append an extra | to the end so that we can match the final part + for lane in (lanes .. 
'|'):gmatch("([^|]*)|") do + if lane and lane ~= "designated" then + return false + end + end + return true +end + +-- handle high occupancy vehicle tags +function WayHandlers.hov(profile,way,result,data) + -- respect user-preference for HOV + if not profile.avoid.hov_lanes then + return + end + + local hov = way:get_value_by_key("hov") + if "designated" == hov then + result.forward_restricted = true + result.backward_restricted = true + end + + data.hov_lanes_forward, data.hov_lanes_backward = Tags.get_forward_backward_by_key(way,data,'hov:lanes') + local all_hov_forward = WayHandlers.has_all_designated_hov_lanes(data.hov_lanes_forward) + local all_hov_backward = WayHandlers.has_all_designated_hov_lanes(data.hov_lanes_backward) + + -- in this case we will use turn penalties instead of filtering out + if profile.properties.weight_name == 'routability' then + if (all_hov_forward) then + result.forward_restricted = true + end + if (all_hov_backward) then + result.backward_restricted = true + end + return + end + + -- filter out ways where all lanes are hov only + if all_hov_forward then + result.forward_mode = mode.inaccessible + end + if all_hov_backward then + result.backward_mode = mode.inaccessible + end +end + + +-- set highway and access classification by user preference +function WayHandlers.way_classification_for_turn(profile,way,result,data) + local highway = way:get_value_by_key("highway") + local access = way:get_value_by_key("access") + + if highway and profile.highway_turn_classification[highway] then + assert(profile.highway_turn_classification[highway] < 16, "highway_turn_classification must be smaller than 16") + result.highway_turn_classification = profile.highway_turn_classification[highway] + end + if access and profile.access_turn_classification[access] then + assert(profile.access_turn_classification[access] < 16, "access_turn_classification must be smaller than 16") + result.access_turn_classification = profile.access_turn_classification[access] + end +end + + +-- check accessibility by traversing our access tag hierarchy +function WayHandlers.access(profile,way,result,data) + data.forward_access, data.backward_access = + Tags.get_forward_backward_by_set(way,data,profile.access_tags_hierarchy) + + -- only allow a subset of roads to be treated as restricted + if profile.restricted_highway_whitelist[data.highway] then + if profile.restricted_access_tag_list[data.forward_access] then + result.forward_restricted = true + end + + if profile.restricted_access_tag_list[data.backward_access] then + result.backward_restricted = true + end + end + + -- blacklist access tags that aren't marked as restricted + if profile.access_tag_blacklist[data.forward_access] and not result.forward_restricted then + result.forward_mode = mode.inaccessible + end + + if profile.access_tag_blacklist[data.backward_access] and not result.backward_restricted then + result.backward_mode = mode.inaccessible + end + + if result.forward_mode == mode.inaccessible and result.backward_mode == mode.inaccessible then + return false + end +end + +-- handle speed (excluding maxspeed) +function WayHandlers.speed(profile,way,result,data) + if result.forward_speed ~= -1 then + return -- abort if already set, eg. 
by a route + end + + local key,value,speed = Tags.get_constant_by_key_value(way,profile.speeds) + + if speed then + -- set speed by way type + result.forward_speed = speed + result.backward_speed = speed + else + -- Set the avg speed on ways that are marked accessible + if profile.access_tag_whitelist[data.forward_access] then + result.forward_speed = profile.default_speed + elseif data.forward_access and not profile.access_tag_blacklist[data.forward_access] then + result.forward_speed = profile.default_speed -- fallback to the avg speed if access tag is not blacklisted + elseif not data.forward_access and data.backward_access then + result.forward_mode = mode.inaccessible + end + + if profile.access_tag_whitelist[data.backward_access] then + result.backward_speed = profile.default_speed + elseif data.backward_access and not profile.access_tag_blacklist[data.backward_access] then + result.backward_speed = profile.default_speed -- fallback to the avg speed if access tag is not blacklisted + elseif not data.backward_access and data.forward_access then + result.backward_mode = mode.inaccessible + end + end + + if result.forward_speed == -1 and result.backward_speed == -1 and result.duration <= 0 then + return false + end +end + +-- add class information +function WayHandlers.classes(profile,way,result,data) + if not profile.classes then + return + end + + local allowed_classes = Set {} + for k, v in pairs(profile.classes) do + allowed_classes[v] = true + end + + local forward_toll, backward_toll = Tags.get_forward_backward_by_key(way, data, "toll") + local forward_route, backward_route = Tags.get_forward_backward_by_key(way, data, "route") + local tunnel = way:get_value_by_key("tunnel") + + if allowed_classes["tunnel"] and tunnel and tunnel ~= "no" then + result.forward_classes["tunnel"] = true + result.backward_classes["tunnel"] = true + end + + if allowed_classes["toll"] and forward_toll == "yes" then + result.forward_classes["toll"] = true + end + if allowed_classes["toll"] and backward_toll == "yes" then + result.backward_classes["toll"] = true + end + + if allowed_classes["ferry"] and forward_route == "ferry" then + result.forward_classes["ferry"] = true + end + if allowed_classes["ferry"] and backward_route == "ferry" then + result.backward_classes["ferry"] = true + end + + if allowed_classes["restricted"] and result.forward_restricted then + result.forward_classes["restricted"] = true + end + if allowed_classes["restricted"] and result.backward_restricted then + result.backward_classes["restricted"] = true + end + + if allowed_classes["motorway"] and (data.highway == "motorway" or data.highway == "motorway_link") then + result.forward_classes["motorway"] = true + result.backward_classes["motorway"] = true + end +end + +-- reduce speed on bad surfaces +function WayHandlers.surface(profile,way,result,data) + local surface = way:get_value_by_key("surface") + local tracktype = way:get_value_by_key("tracktype") + local smoothness = way:get_value_by_key("smoothness") + + if surface and profile.surface_speeds[surface] then + result.forward_speed = math.min(profile.surface_speeds[surface], result.forward_speed) + result.backward_speed = math.min(profile.surface_speeds[surface], result.backward_speed) + end + if tracktype and profile.tracktype_speeds[tracktype] then + result.forward_speed = math.min(profile.tracktype_speeds[tracktype], result.forward_speed) + result.backward_speed = math.min(profile.tracktype_speeds[tracktype], result.backward_speed) + end + if smoothness and 
profile.smoothness_speeds[smoothness] then + result.forward_speed = math.min(profile.smoothness_speeds[smoothness], result.forward_speed) + result.backward_speed = math.min(profile.smoothness_speeds[smoothness], result.backward_speed) + end +end + +-- scale speeds to get better average driving times +function WayHandlers.penalties(profile,way,result,data) + -- heavily penalize a way tagged with all HOV lanes + -- in order to only route over them if there is no other option + local service_penalty = 1.0 + local service = way:get_value_by_key("service") + if service and profile.service_penalties[service] then + service_penalty = profile.service_penalties[service] + end + + local width_penalty = 1.0 + local width = math.huge + local lanes = math.huge + local width_string = way:get_value_by_key("width") + if width_string and tonumber(width_string:match("%d*")) then + width = tonumber(width_string:match("%d*")) + end + + local lanes_string = way:get_value_by_key("lanes") + if lanes_string and tonumber(lanes_string:match("%d*")) then + lanes = tonumber(lanes_string:match("%d*")) + end + + local is_bidirectional = result.forward_mode ~= mode.inaccessible and + result.backward_mode ~= mode.inaccessible + + if width <= 3 or (lanes <= 1 and is_bidirectional) then + width_penalty = 0.5 + end + + -- Handle high frequency reversible oneways (think traffic signal controlled, changing direction every 15 minutes). + -- Scaling speed to take average waiting time into account plus some more for start / stop. + local alternating_penalty = 1.0 + if data.oneway == "alternating" then + alternating_penalty = 0.4 + end + + local sideroad_penalty = 1.0 + data.sideroad = way:get_value_by_key("side_road") + if "yes" == data.sideroad or "rotary" == data.sideroad then + sideroad_penalty = profile.side_road_multiplier + end + + local forward_penalty = math.min(service_penalty, width_penalty, alternating_penalty, sideroad_penalty) + local backward_penalty = math.min(service_penalty, width_penalty, alternating_penalty, sideroad_penalty) + + if profile.properties.weight_name == 'routability' then + if result.forward_speed > 0 then + result.forward_rate = (result.forward_speed * forward_penalty) / 3.6 + end + if result.backward_speed > 0 then + result.backward_rate = (result.backward_speed * backward_penalty) / 3.6 + end + if result.duration > 0 then + result.weight = result.duration / forward_penalty + end + end +end + +-- maxspeed and advisory maxspeed +function WayHandlers.maxspeed(profile,way,result,data) + local keys = Sequence { 'maxspeed:advisory', 'maxspeed', 'source:maxspeed', 'maxspeed:type' } + local forward, backward = Tags.get_forward_backward_by_set(way,data,keys) + forward = WayHandlers.parse_maxspeed(forward,profile) + backward = WayHandlers.parse_maxspeed(backward,profile) + + if forward and forward > 0 then + result.forward_speed = forward * profile.speed_reduction + end + + if backward and backward > 0 then + result.backward_speed = backward * profile.speed_reduction + end +end + +function WayHandlers.parse_maxspeed(source,profile) + if not source then + return 0 + end + + local n = Measure.get_max_speed(source) + if not n then + -- parse maxspeed like FR:urban + source = string.lower(source) + n = profile.maxspeed_table[source] + if not n then + local highway_type = string.match(source, "%a%a:(%a+)") + n = profile.maxspeed_table_default[highway_type] + if not n then + n = 0 + end + end + end + return n +end + +-- handle maxheight tags +function WayHandlers.handle_height(profile,way,result,data) + local 
keys = Sequence { 'maxheight:physical', 'maxheight' } + local forward, backward = Tags.get_forward_backward_by_set(way,data,keys) + forward = Measure.get_max_height(forward,way) + backward = Measure.get_max_height(backward,way) + + if forward and forward < profile.vehicle_height then + result.forward_mode = mode.inaccessible + end + + if backward and backward < profile.vehicle_height then + result.backward_mode = mode.inaccessible + end +end + +-- handle maxwidth tags +function WayHandlers.handle_width(profile,way,result,data) + local keys = Sequence { 'maxwidth:physical', 'maxwidth', 'width', 'est_width' } + local forward, backward = Tags.get_forward_backward_by_set(way,data,keys) + local narrow = way:get_value_by_key('narrow') + + if ((forward and forward == 'narrow') or (narrow and narrow == 'yes')) and profile.vehicle_width > 2.2 then + result.forward_mode = mode.inaccessible + elseif forward then + forward = Measure.get_max_width(forward) + if forward and forward <= profile.vehicle_width then + result.forward_mode = mode.inaccessible + end + end + + if ((backward and backward == 'narrow') or (narrow and narrow == 'yes')) and profile.vehicle_width > 2.2 then + result.backward_mode = mode.inaccessible + elseif backward then + backward = Measure.get_max_width(backward) + if backward and backward <= profile.vehicle_width then + result.backward_mode = mode.inaccessible + end + end +end + +-- handle maxweight tags +function WayHandlers.handle_weight(profile,way,result,data) + local keys = Sequence { 'maxweight' } + local forward, backward = Tags.get_forward_backward_by_set(way,data,keys) + forward = Measure.get_max_weight(forward) + backward = Measure.get_max_weight(backward) + + if forward and forward < profile.vehicle_weight then + result.forward_mode = mode.inaccessible + end + + if backward and backward < profile.vehicle_weight then + result.backward_mode = mode.inaccessible + end +end + +-- handle maxlength tags +function WayHandlers.handle_length(profile,way,result,data) + local keys = Sequence { 'maxlength' } + local forward, backward = Tags.get_forward_backward_by_set(way,data,keys) + forward = Measure.get_max_length(forward) + backward = Measure.get_max_length(backward) + + if forward and forward < profile.vehicle_length then + result.forward_mode = mode.inaccessible + end + + if backward and backward < profile.vehicle_length then + result.backward_mode = mode.inaccessible + end +end + +-- handle oneways tags +function WayHandlers.oneway(profile,way,result,data) + if not profile.oneway_handling then + return + end + + local oneway + if profile.oneway_handling == true then + oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway') or way:get_value_by_key("oneway") + elseif profile.oneway_handling == 'specific' then + oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway') + elseif profile.oneway_handling == 'conditional' then + -- Following code assumes that `oneway` and `oneway:conditional` tags have opposite values and takes weakest (always `no`). + -- So if we will have: + -- oneway=yes, oneway:conditional=no @ (condition1) + -- oneway=no, oneway:conditional=yes @ (condition2) + -- condition1 will be always true and condition2 will be always false. 
+ if way:get_value_by_key("oneway:conditional") then + oneway = "no" + else + oneway = Tags.get_value_by_prefixed_sequence(way,profile.restrictions,'oneway') or way:get_value_by_key("oneway") + end + end + + data.oneway = oneway + + if oneway == "-1" then + data.is_reverse_oneway = true + result.forward_mode = mode.inaccessible + elseif oneway == "yes" or + oneway == "1" or + oneway == "true" then + data.is_forward_oneway = true + result.backward_mode = mode.inaccessible + elseif profile.oneway_handling == true then + local junction = way:get_value_by_key("junction") + if data.highway == "motorway" or + junction == "roundabout" or + junction == "circular" then + if oneway ~= "no" then + -- implied oneway + data.is_forward_oneway = true + result.backward_mode = mode.inaccessible + end + end + end +end + +function WayHandlers.weights(profile,way,result,data) + if profile.properties.weight_name == 'distance' then + result.weight = -1 + -- set weight rates to 1 for the distance weight, edge weights are distance / rate + if (result.forward_mode ~= mode.inaccessible and result.forward_speed > 0) then + result.forward_rate = 1 + end + if (result.backward_mode ~= mode.inaccessible and result.backward_speed > 0) then + result.backward_rate = 1 + end + end +end + + +-- handle general avoid rules + +function WayHandlers.avoid_ways(profile,way,result,data) + if profile.avoid[data.highway] then + return false + end +end + +-- handle various that can block access +function WayHandlers.blocked_ways(profile,way,result,data) + + -- areas + if profile.avoid.area and way:get_value_by_key("area") == "yes" then + return false + end + + -- toll roads + if profile.avoid.toll and way:get_value_by_key("toll") == "yes" then + return false + end + + -- don't route over steps + if profile.avoid.steps and data.highway == "steps" then + return false + end + + -- construction + -- TODO if highway is valid then we shouldn't check railway, and vica versa + if profile.avoid.construction and (data.highway == 'construction' or way:get_value_by_key('railway') == 'construction') then + return false + end + + -- In addition to the highway=construction tag above handle the construction=* tag + -- http://wiki.openstreetmap.org/wiki/Key:construction + -- https://taginfo.openstreetmap.org/keys/construction#values + if profile.avoid.construction then + local construction = way:get_value_by_key('construction') + + -- Of course there are negative tags to handle, too + if construction and not profile.construction_whitelist[construction] then + return false + end + end + + -- Not only are there multiple construction tags there is also a proposed=* tag. + -- http://wiki.openstreetmap.org/wiki/Key:proposed + -- https://taginfo.openstreetmap.org/keys/proposed#values + if profile.avoid.proposed and way:get_value_by_key('proposed') then + return false + end + + -- Reversible oneways change direction with low frequency (think twice a day): + -- do not route over these at all at the moment because of time dependence. + -- Note: alternating (high frequency) oneways are handled below with penalty. 
+ if profile.avoid.reversible and way:get_value_by_key("oneway") == "reversible" then + return false + end + + -- impassables + if profile.avoid.impassable then + if way:get_value_by_key("impassable") == "yes" then + return false + end + + if way:get_value_by_key("status") == "impassable" then + return false + end + end +end + +function WayHandlers.driving_side(profile, way, result, data) + local driving_side = way:get_value_by_key('driving_side') + if driving_side == nil then + driving_side = way:get_location_tag('driving_side') + end + + if driving_side == 'left' then + result.is_left_hand_driving = true + elseif driving_side == 'right' then + result.is_left_hand_driving = false + else + result.is_left_hand_driving = profile.properties.left_hand_driving + end +end + + +-- Call a sequence of handlers, aborting in case a handler returns false. Example: +-- +-- handlers = Sequence { +-- WayHandlers.tag_prefetch, +-- WayHandlers.default_mode, +-- WayHandlers.blocked_ways, +-- WayHandlers.access, +-- WayHandlers.speed, +-- WayHandlers.names +-- } +-- +-- WayHandlers.run(handlers,way,result,data,profile) +-- +-- Each method in the list will be called on the WayHandlers object. +-- All handlers must accept the parameteres (profile, way, result, data, relations) and return false +-- if the handler chain should be aborted. +-- To ensure the correct order of method calls, use a Sequence of handler names. + +function WayHandlers.run(profile, way, result, data, handlers, relations) + for i,handler in ipairs(handlers) do + if handler(profile, way, result, data, relations) == false then + return false + end + end +end + +return WayHandlers diff --git a/admiral-router/vehicles/mk.lua b/admiral-router/vehicles/mk.lua new file mode 100644 index 0000000..08d6d61 --- /dev/null +++ b/admiral-router/vehicles/mk.lua @@ -0,0 +1,504 @@ +-- Car profile + +api_version = 4 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +Relations = require("lib/relations") +find_access_tag = require("lib/access").find_access_tag +limit = require("lib/maxspeed").limit +Utils = require("lib/utils") +Measure = require("lib/measure") + +function setup() + return { + properties = { + max_speed_for_map_matching = 100/3.6, -- 180kmph -> m/s + -- For routing based on duration, but weighted for preferring certain roads +-- weight_name = 'routability', + -- For shortest duration without penalties for accessibility + weight_name = 'duration', + -- For shortest distance without penalties for accessibility + -- weight_name = 'distance', + process_call_tagless_node = false, + u_turn_penalty = 20, + continue_straight_at_waypoint = true, + use_turn_restrictions = true, + left_hand_driving = false, + traffic_light_penalty = 2, + }, + + default_mode = mode.driving, + default_speed = 23, + oneway_handling = true, + side_road_multiplier = 0.9, + turn_penalty = 4, + speed_reduction = 0.9, + turn_bias = 1.05, + cardinal_directions = false, + + -- Size of the vehicle, to be limited by physical restriction of the way + vehicle_height = 1.5, -- in meters, 2.0m is the height slightly above biggest SUVs + vehicle_width = 1.0, -- in meters, ways with narrow tag are considered narrower than 2.2m + + -- Size of the vehicle, to be limited mostly by legal restriction of the way + vehicle_length = 2, -- in meters, 4.8m is the length of large or family car + vehicle_weight = 200, -- in kilograms + + -- a list of suffixes to suppress in name change instructions. 
The suffixes also include common substrings of each other + suffix_list = { + 'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East', 'Nor', 'Sou', 'We', 'Ea' + }, + + barrier_whitelist = Set { + 'cattle_grid', + 'border_control', + 'toll_booth', + 'sally_port', + 'gate', + 'lift_gate', + 'no', + 'entrance', + 'height_restrictor', + 'arch' + }, + + access_tag_whitelist = Set { + 'yes', + 'motorcar', + "motorcycle", + 'motor_vehicle', + 'vehicle', + 'permissive', + 'designated', + 'hov' + }, + + access_tag_blacklist = Set { + 'no', + 'destination' + }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { + 'private' + }, + + restricted_access_tag_list = Set { + 'destination', + }, + + access_tags_hierarchy = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle', + 'access' + }, + + service_tag_forbidden = Set { + }, + + restrictions = Sequence { + 'motorcar', + 'motor_vehicle', + 'vehicle' + }, + + classes = Sequence { + 'toll', 'motorway', 'ferry', 'restricted', 'tunnel' + }, + + -- classes to support for exclude flags + excludable = Sequence { + Set {'toll'}, + Set {'motorway'}, + Set {'ferry'} + }, + + avoid = Set { + 'area', + -- 'toll', -- uncomment this to avoid tolls + 'reversible', + 'impassable', + 'hov_lanes', + 'steps', + 'construction', + 'proposed' + }, + + speeds = Sequence { + highway = { + motorway = 100, + motorway_link = 50, + trunk = 90, + trunk_link = 40, + primary = 50, + primary_link = 30, + secondary = 50, + secondary_link = 30, + tertiary = 25, + tertiary_link = 25, + unclassified = 25, + track = 20, + residential = 14, + living_street = 10, + service = 10 + } + }, + + service_penalties = { + alley = 0.5, + parking = 0.5, + parking_aisle = 0.5, + driveway = 0.5, + ["drive-through"] = 0.5, + ["drive-thru"] = 0.5 + }, + + restricted_highway_whitelist = Set { + 'motorway', + 'motorway_link', + 'trunk', + 'trunk_link', + 'primary', + 'primary_link', + 'secondary', + 'secondary_link', + 'tertiary', + 'tertiary_link', + 'residential', + 'living_street', + 'unclassified', + 'service', + 'track' + }, + + construction_whitelist = Set { + 'no', + 'widening', + 'minor', + }, + + route_speeds = { + ferry = 5, + shuttle_train = 10 + }, + + bridge_speeds = { + movable = 5 + }, + + -- surface/trackype/smoothness + -- values were estimated from looking at the photos at the relevant wiki pages + + -- max speed for surfaces + surface_speeds = { + asphalt = nil, -- nil mean no limit. 
removing the line has the same effect + concrete = nil, + ["concrete:plates"] = nil, + ["concrete:lanes"] = nil, + paved = nil, + + cement = 80, + compacted = 80, + fine_gravel = 80, + + paving_stones = 60, + metal = 60, + bricks = 60, + + grass = 40, + wood = 40, + sett = 40, + grass_paver = 40, + gravel = 40, + unpaved = 40, + ground = 40, + dirt = 40, + pebblestone = 40, + tartan = 40, + + cobblestone = 30, + clay = 30, + + earth = 20, + stone = 20, + rocky = 20, + sand = 20, + + mud = 10 + }, + + -- max speed for tracktypes + tracktype_speeds = { + grade1 = 60, + grade2 = 40, + grade3 = 30, + grade4 = 25, + grade5 = 20 + }, + + -- max speed for smoothnesses + smoothness_speeds = { + intermediate = 80, + bad = 40, + very_bad = 20, + horrible = 10, + very_horrible = 5, + impassable = 0 + }, + + -- http://wiki.openstreetmap.org/wiki/Speed_limits + maxspeed_table_default = { + urban = 50, + rural = 90, + trunk = 100, + motorway = 100 + }, + + -- List only exceptions + maxspeed_table = { + ["at:rural"] = 100, + ["at:trunk"] = 100, + ["be:motorway"] = 120, + ["be-bru:rural"] = 70, + ["be-bru:urban"] = 30, + ["be-vlg:rural"] = 70, + ["by:urban"] = 60, + ["by:motorway"] = 100, + ["ch:rural"] = 80, + ["ch:trunk"] = 100, + ["ch:motorway"] = 100, + ["cz:trunk"] = 0, + ["cz:motorway"] = 0, + ["de:living_street"] = 7, + ["de:rural"] = 100, + ["de:motorway"] = 0, + ["dk:rural"] = 80, + ["fr:rural"] = 80, + ["gb:nsl_single"] = (60*1609)/1000, + ["gb:nsl_dual"] = (70*1609)/1000, + ["gb:motorway"] = (70*1609)/1000, + ["nl:rural"] = 80, + ["nl:trunk"] = 100, + ['no:rural'] = 80, + ['no:motorway'] = 100, + ['pl:rural'] = 100, + ['pl:trunk'] = 100, + ['pl:motorway'] = 100, + ["ro:trunk"] = 100, + ["ru:living_street"] = 20, + ["ru:urban"] = 60, + ["ru:motorway"] = 100, + ["uk:nsl_single"] = (60*1609)/1000, + ["uk:nsl_dual"] = (70*1609)/1000, + ["uk:motorway"] = (70*1609)/1000, + ['za:urban'] = 60, + ['za:rural'] = 100, + ["none"] = 100 + }, + + relation_types = Sequence { + "route" + }, + + -- classify highway tags when necessary for turn weights + highway_turn_classification = { + }, + + -- classify access tags when necessary for turn weights + access_turn_classification = { + } + } +end + +function process_node(profile, node, result, relations) + -- parse access and barrier tags + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access then + if profile.access_tag_blacklist[access] and not profile.restricted_access_tag_list[access] then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier then + -- check height restriction barriers + local restricted_by_height = false + if barrier == 'height_restrictor' then + local maxheight = Measure.get_max_height(node:get_value_by_key("maxheight"), node) + restricted_by_height = maxheight and maxheight < profile.vehicle_height + end + + -- make an exception for rising bollard barriers + local bollard = node:get_value_by_key("bollard") + local rising_bollard = bollard and "rising" == bollard + + -- make an exception for lowered/flat barrier=kerb + -- and incorrect tagging of highway crossing kerb as highway barrier + local kerb = node:get_value_by_key("kerb") + local highway = node:get_value_by_key("highway") + local flat_kerb = kerb and ("lowered" == kerb or "flush" == kerb) + local highway_crossing_kerb = barrier == "kerb" and highway and highway == "crossing" + + if not profile.barrier_whitelist[barrier] + and not rising_bollard + and not flat_kerb + and not highway_crossing_kerb + or 
restricted_by_height then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if "traffic_signals" == tag then + result.traffic_lights = true + end +end + +function process_way(profile, way, result, relations) + -- the intial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and intial tag check + -- is done in directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. + + -- data table for storing intermediate values during processing + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + bridge = way:get_value_by_key('bridge'), + route = way:get_value_by_key('route') + } + + -- perform an quick initial check and abort if the way is + -- obviously not routable. + -- highway or route tags must be in data table, bridge is optional + if (not data.highway or data.highway == '') and + (not data.route or data.route == '') + then + return + end + + handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + WayHandlers.avoid_ways, + WayHandlers.handle_height, + WayHandlers.handle_width, + WayHandlers.handle_length, + WayHandlers.handle_weight, + + -- determine access status by checking our hierarchy of + -- access tags, e.g: motorcar, motor_vehicle, vehicle + WayHandlers.access, + + -- check whether forward/backward directions are routable + WayHandlers.oneway, + + -- check a road's destination + WayHandlers.destinations, + + -- check whether we're using a special transport mode + WayHandlers.ferries, + WayHandlers.movables, + + -- handle service road restrictions + WayHandlers.service, + + -- handle hov + WayHandlers.hov, + + -- compute speed taking into account way type, maxspeed tags, etc. + WayHandlers.speed, + WayHandlers.maxspeed, + WayHandlers.surface, + WayHandlers.penalties, + + -- compute class labels + WayHandlers.classes, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.turn_lanes, + WayHandlers.classification, + + -- handle various other flags + WayHandlers.roundabouts, + WayHandlers.startpoint, + WayHandlers.driving_side, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set weight properties of the way + WayHandlers.weights, + + -- set classification of ways relevant for turns + WayHandlers.way_classification_for_turn + } + + WayHandlers.run(profile, way, result, data, handlers, relations) + + if profile.cardinal_directions then + Relations.process_way_refs(way, relations, result) + end +end + +function process_turn(profile, turn) + -- Use a sigmoid function to return a penalty that maxes out at turn_penalty + -- over the space of 0-180 degrees. Values here were chosen by fitting + -- the function to some turn penalty samples from real driving. + local turn_penalty = profile.turn_penalty + local turn_bias = turn.is_left_hand_driving and 1. 
/ profile.turn_bias or profile.turn_bias + + if turn.has_traffic_light then + turn.duration = profile.properties.traffic_light_penalty + end + + if turn.number_of_roads > 2 or turn.source_mode ~= turn.target_mode or turn.is_u_turn then + if turn.angle >= 0 then + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 / turn_bias) * turn.angle/180 - 6.5*turn_bias))) + else + turn.duration = turn.duration + turn_penalty / (1 + math.exp( -((13 * turn_bias) * -turn.angle/180 - 6.5/turn_bias))) + end + + if turn.is_u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + end + + -- for distance based routing we don't want to have penalties based on turn angle + if profile.properties.weight_name == 'distance' then + turn.weight = 0 + else + turn.weight = turn.duration + end + + if profile.properties.weight_name == 'routability' then + -- penalize turns from non-local access only segments onto local access only tags + if not turn.source_restricted and turn.target_restricted then + turn.weight = constants.max_turn_weight + end + end +end + + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-router/vehicles/walk.lua b/admiral-router/vehicles/walk.lua new file mode 100644 index 0000000..4c60bfb --- /dev/null +++ b/admiral-router/vehicles/walk.lua @@ -0,0 +1,264 @@ +-- Foot profile + +api_version = 2 + +Set = require('lib/set') +Sequence = require('lib/sequence') +Handlers = require("lib/way_handlers") +find_access_tag = require("lib/access").find_access_tag + +function setup() + local max_speed = 5 + local walking_speed = 5 + return { + properties = { + weight_name = 'duration', + max_speed_for_map_matching = max_speed/3.6, -- kmph -> m/s + call_tagless_node_function = false, + traffic_light_penalty = 2, + u_turn_penalty = 2, + continue_straight_at_waypoint = false, + use_turn_restrictions = false, + }, + + default_mode = mode.walking, + default_speed = walking_speed, + oneway_handling = 'specific', -- respect 'oneway:foot' but not 'oneway' + + barrier_blacklist = Set { + 'yes', + 'wall', + 'fence' + }, + + access_tag_whitelist = Set { + 'yes', + 'foot', + 'permissive', + 'designated' + }, + + access_tag_blacklist = Set { + 'no', + }, + + restricted_access_tag_list = Set { }, + + restricted_highway_whitelist = Set { }, + + construction_whitelist = Set {}, + + access_tags_hierarchy = Sequence { + 'foot', + 'access' + }, + + -- tags disallow access to in combination with highway=service + service_access_tag_blacklist = Set { }, + + restrictions = Sequence { + 'foot' + }, + + -- list of suffixes to suppress in name change instructions + suffix_list = Set { + 'N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'North', 'South', 'West', 'East' + }, + + avoid = Set { + 'impassable' + }, + + speeds = Sequence { + highway = { + primary = walking_speed, + primary_link = walking_speed, + secondary = walking_speed, + secondary_link = walking_speed, + tertiary = walking_speed, + tertiary_link = walking_speed, + unclassified = walking_speed, + residential = walking_speed, + road = walking_speed, + living_street = walking_speed, + service = walking_speed, + track = walking_speed, + path = walking_speed, + steps = walking_speed, + pedestrian = walking_speed, + footway = walking_speed, + pier = walking_speed, + }, + + railway = { + platform = walking_speed + }, + + amenity = { + parking = walking_speed, + parking_entrance= walking_speed + }, + + man_made = { + pier = walking_speed + }, + + 
leisure = { + track = walking_speed + } + }, + + route_speeds = { + ferry = 5 + }, + + bridge_speeds = { + }, + + surface_speeds = { + fine_gravel = walking_speed*0.75, + gravel = walking_speed*0.75, + pebblestone = walking_speed*0.75, + mud = walking_speed*0.5, + sand = walking_speed*0.5 + }, + + tracktype_speeds = { + }, + + smoothness_speeds = { + } + } +end + +function process_node(profile, node, result) + -- parse access and barrier tags + local access = find_access_tag(node, profile.access_tags_hierarchy) + if access then + if profile.access_tag_blacklist[access] then + result.barrier = true + end + else + local barrier = node:get_value_by_key("barrier") + if barrier then + -- make an exception for rising bollard barriers + local bollard = node:get_value_by_key("bollard") + local rising_bollard = bollard and "rising" == bollard + + if profile.barrier_blacklist[barrier] and not rising_bollard then + result.barrier = true + end + end + end + + -- check if node is a traffic light + local tag = node:get_value_by_key("highway") + if "traffic_signals" == tag then + result.traffic_lights = true + end +end + +-- main entry point for processsing a way +function process_way(profile, way, result) + -- the intial filtering of ways based on presence of tags + -- affects processing times significantly, because all ways + -- have to be checked. + -- to increase performance, prefetching and intial tag check + -- is done in directly instead of via a handler. + + -- in general we should try to abort as soon as + -- possible if the way is not routable, to avoid doing + -- unnecessary work. this implies we should check things that + -- commonly forbids access early, and handle edge cases later. + + -- data table for storing intermediate values during processing + local data = { + -- prefetch tags + highway = way:get_value_by_key('highway'), + bridge = way:get_value_by_key('bridge'), + route = way:get_value_by_key('route'), + leisure = way:get_value_by_key('leisure'), + man_made = way:get_value_by_key('man_made'), + railway = way:get_value_by_key('railway'), + platform = way:get_value_by_key('platform'), + amenity = way:get_value_by_key('amenity'), + public_transport = way:get_value_by_key('public_transport') + } + + -- perform an quick initial check and abort if the way is + -- obviously not routable. here we require at least one + -- of the prefetched tags to be present, ie. the data table + -- cannot be empty + if next(data) == nil then -- is the data table empty? + return + end + + local handlers = Sequence { + -- set the default mode for this profile. if can be changed later + -- in case it turns we're e.g. on a ferry + WayHandlers.default_mode, + + -- check various tags that could indicate that the way is not + -- routable. this includes things like status=impassable, + -- toll=yes and oneway=reversible + WayHandlers.blocked_ways, + + -- determine access status by checking our hierarchy of + -- access tags, e.g: motorcar, motor_vehicle, vehicle + WayHandlers.access, + + -- check whether forward/backward directons are routable + WayHandlers.oneway, + + -- check whether forward/backward directons are routable + WayHandlers.destinations, + + -- check whether we're using a special transport mode + WayHandlers.ferries, + WayHandlers.movables, + + -- compute speed taking into account way type, maxspeed tags, etc. 
+ WayHandlers.speed, + WayHandlers.surface, + + -- handle turn lanes and road classification, used for guidance + WayHandlers.classification, + + -- handle various other flags + WayHandlers.roundabouts, + WayHandlers.startpoint, + + -- set name, ref and pronunciation + WayHandlers.names, + + -- set weight properties of the way + WayHandlers.weights + } + + WayHandlers.run(profile, way, result, data, handlers) +end + +function process_turn (profile, turn) + turn.duration = 0. + + if turn.direction_modifier == direction_modifier.u_turn then + turn.duration = turn.duration + profile.properties.u_turn_penalty + end + + if turn.has_traffic_light then + turn.duration = profile.properties.traffic_light_penalty + end + if profile.properties.weight_name == 'routability' then + -- penalize turns from non-local access only segments onto local access only tags + if not turn.source_restricted and turn.target_restricted then + turn.weight = turn.weight + 3000 + end + end +end + +return { + setup = setup, + process_way = process_way, + process_node = process_node, + process_turn = process_turn +} diff --git a/admiral-worker/.editorconfig b/admiral-worker/.editorconfig new file mode 100644 index 0000000..91fea9b --- /dev/null +++ b/admiral-worker/.editorconfig @@ -0,0 +1,8 @@ +root = true + +[*] +indent_style = tab +insert_final_newline = true +max_line_length = 150 +tab_width = 4 +trim_trailing_whitespace = true diff --git a/admiral-worker/.gitignore b/admiral-worker/.gitignore new file mode 100644 index 0000000..3a94e43 --- /dev/null +++ b/admiral-worker/.gitignore @@ -0,0 +1,25 @@ +.idea +.vscode + +# Secrets +.env* + +# Auto generated elements +/logs +venv +.coverage +htmlcov + +# Big files +!.gitkeep +data/graph-hopper/** +data/updater/** +data/ftp/** +data/worker/** +data/snapshot/** +data/mock/** +app/notebooks/inputs/** +app/notebooks/outputs/** +app/notebooks/calculated_stop_times/** +# Custom files for valgalla +custom_files diff --git a/admiral-worker/Dockerfile b/admiral-worker/Dockerfile new file mode 100644 index 0000000..d2d1cb4 --- /dev/null +++ b/admiral-worker/Dockerfile @@ -0,0 +1,5 @@ +FROM python:3.11 +WORKDIR /app +COPY . . +RUN pip install -r requirements.txt +ENTRYPOINT ["python", "cli"] diff --git a/admiral-worker/Makefile b/admiral-worker/Makefile new file mode 100644 index 0000000..8d40f34 --- /dev/null +++ b/admiral-worker/Makefile @@ -0,0 +1,23 @@ +include buildSrc/common.mk + +.PHONY: api app core data tests + +init: ## start virtual environment and install dev. 
requirements + #sudo apt install python3-virtualenv + + rm -fr $(VIRTUAL_ENV) + virtualenv -p python3 $(VIRTUAL_ENV) + $(MAKE) install + +install: ## install development libs + pip install -r requirements.txt + +tests: ## execute test suite + python3 -m unittest discover tests "test_*.py" + +coverage: ## create HTML coverage report + coverage run --source=app,core --omit=core/services/*,core/repos/* -m unittest discover tests "test_*.py" && coverage html + +export PYTHONPATH=$PYTHONPATH:.:.venv/bin/ +run_optimization_worker: + .venv/bin/python ./cli/run_optimization_worker.py diff --git a/admiral-worker/app/App.py b/admiral-worker/app/App.py new file mode 100644 index 0000000..621c8e0 --- /dev/null +++ b/admiral-worker/app/App.py @@ -0,0 +1,363 @@ +from typing import Optional + +from dotenv import load_dotenv + +from app.repos.sql.CrnMicroUpdateSqlRepo import CrnMicroUpdateSqlRepo +from app.repos.sql.GpsServiceTimeSqlRepo import GpsServiceTimeSqlRepo +from app.repos.sql.GpsSqlRepo import GpsSqlRepo +from app.repos.sql.GpsStopTimeSqlRepo import GpsStopTimeSqlRepo +from app.repos.sql.GpsDeliverySqlRepo import GpsDeliverySqlRepo +from app.repos.sql.OptimizationMetricsSqlRepo import OptimizationMetricsSqlRepo +from app.services.FsFtpService import FsFtpService +from app.services.OsrmRoutingService import OsrmRoutingService +from app.services.SolvesallOptimizationService import SolvesallOptimizationService +from core.domain.worker.Worker import Worker +from core.repos.CrnMicroUpdateRepo import CrnMicroUpdateRepo +from core.repos.GpsDeliveryRepo import GpsDeliveryRepo +from core.repos.GpsRepo import GpsRepo +from core.repos.GpsServiceTimeRepo import GpsServiceTimeRepo +from core.repos.GpsStopTimeRepo import GpsStopTimeRepo +from core.types.Id import Id +from core.usecases.Print_file_hashes import Print_file_hashes +from core.usecases.Run_gps_worker import Run_gps_worker +from core.usecases.gps_worker.Analyze_delivery_data import Analyze_delivery_data +from core.usecases.gps_worker.Analyze_gps_data import Analyze_gps_data +from core.usecases.gps_worker.Match_crnPoints_with_allLandPlots import Match_crnPoints_with_allLandPlots +from core.usecases.gps_worker.Match_crnPoints_with_landPlots import Match_crnPoints_with_landPlots +from core.usecases.gps_worker.Update_service_times import Update_service_times +from core.usecases.optimization_worker.Update_optimization_points import Update_optimization_points +from core.usecases.updating_worker.Test_transport_matrix import Test_transport_matrix + +load_dotenv() + +from sqlalchemy import create_engine + +from app.Env import Env +from app.repos.sql.GpsSessionSqlRepo import GpsSessionSqlRepo +from app.repos.sql.OptimizationResultSqlRepo import OptimizationResultSqlRepo +from app.repos.sql.OptimizationSqlRepo import OptimizationSqlRepo +from app.repos.sql.OptimizationVehicleSqlRepo import OptimizationVehicleSqlRepo +from app.repos.sql.PostOfficeSqlRepo import PostOfficeSqlRepo +from app.repos.sql.WorkerJobLogSqlRepo import WorkerJobLogSqlRepo +from app.repos.sql.WorkerJobSqlRepo import WorkerJobSqlRepo +from app.repos.sql.WorkerJobStatusSqlRepo import WorkerJobStatusSqlRepo +from app.repos.sql.WorkerLogSqlRepo import WorkerLogSqlRepo +from app.repos.sql.WorkerSqlRepo import WorkerSqlRepo +from app.repos.sql.WorkerStatusSqlRepo import WorkerStatusSqlRepo +from app.services.EProstorLandService import EProstorLandService +from app.services.PostaApiService import PostaApiService +from app.services.PsutilSystemService import PsutilSystemService +from 
core.domain.worker.WorkerJob import WorkerJob +from core.repos.GpsSessionRepo import GpsSessionRepo +from core.repos.OptimizationRepo import OptimizationRepo +from core.repos.OptimizationResultRepo import OptimizationResultRepo +from core.repos.OptimizationVehicleRepo import OptimizationVehicleRepo +from core.repos.PostOfficeRepo import PostOfficeRepo +from core.repos.WorkerJobLogRepo import WorkerJobLogRepo +from core.repos.WorkerJobRepo import WorkerJobRepo +from core.repos.WorkerJobStatusRepo import WorkerJobStatusRepo +from core.repos.WorkerLogRepo import WorkerLogRepo +from core.repos.WorkerRepo import WorkerRepo +from core.repos.WorkerStatusRepo import WorkerStatusRepo +from core.services.FtpService import FtpService +from core.services.LandService import LandService +from core.services.OptimizationService import OptimizationService +from core.services.PostaService import PostaService +from core.services.RoutingService import RoutingService +from core.services.SystemService import SystemService +from core.usecases.Run_optimization_worker import Run_optimization_worker +from core.usecases.Run_updating_worker import Run_updating_worker +from core.usecases.optimization_worker.Read_optimization_files import Read_optimization_files +from core.usecases.updating_worker.Calculate_optimization_points import Calculate_optimization_points +from core.usecases.updating_worker.Write_optimization_files import Write_optimization_files +from core.usecases.initialization.Register_worker import Register_worker +from core.usecases.optimization_worker.Run_optimization_job import Run_optimization_job +from core.usecases.logging.Log_worker_job_status import Log_worker_job_status +from core.usecases.logging.Log_worker_status import Log_worker_status + + +class App: + # REPOS + class repos: + optimizationResultRepo: OptimizationResultRepo = None + optimizationRepo: OptimizationRepo = None + optimizationVehicleRepo: OptimizationVehicleRepo = None + workerRepo: WorkerRepo = None + workerLogRepo: WorkerLogRepo = None + workerStatusRepo: WorkerStatusRepo = None + workerJobRepo: WorkerJobRepo = None + workerJobStatusRepo: WorkerJobStatusRepo = None + workerJobLogRepo: WorkerJobLogRepo = None + gpsSessionRepo: GpsSessionRepo = None + gpsRepo: GpsRepo = None + gpsStopTimeRepo: GpsStopTimeRepo = None + gpsServiceTimeRepo: GpsServiceTimeRepo = None + gpsDeliveryRepo: GpsDeliveryRepo = None + postOfficeRepo: PostOfficeRepo = None + crnMicroUpdateRepo: CrnMicroUpdateRepo = None + + # SERVICES + class services: + postaService: PostaService = None + systemService: SystemService = None + ftpService: FtpService = None + routingService: RoutingService = None + landService: LandService = None + optimizationService: OptimizationService = None + + # USE CASES + class usecases: + run_gps_worker: Run_gps_worker = None + print_file_hashes: Print_file_hashes = None + analyze_gps_data: Analyze_gps_data = None + analyze_delivery_data: Analyze_delivery_data = None + update_service_times: Update_service_times = None + match_crnPoints_with_landPlots: Match_crnPoints_with_landPlots = None + match_crnPoints_with_allLandPlots: Match_crnPoints_with_allLandPlots = None + test_transport_matrix: Test_transport_matrix = None + log_worker_job_status: Log_worker_job_status = None + log_worker_status: Log_worker_status = None + register_worker: Register_worker = None + run_optimization_worker: Run_optimization_worker = None + run_updating_worker: Run_updating_worker = None + write_optimization_files: Write_optimization_files = None + 
calculate_optimization_points: Calculate_optimization_points = None + read_optimization_files: Read_optimization_files = None + update_optimization_points: Update_optimization_points = None + + @staticmethod + def init_log_worker_status(id: Id[Worker]): + db = create_engine(Env.DB_URL) + workerStatusRepo = WorkerStatusSqlRepo(db=db) + workerRepo = WorkerSqlRepo(db=db) + systemService = PsutilSystemService() + log_worker_status = Log_worker_status( + workerRepo=workerRepo, + workerStatusRepo=workerStatusRepo, + systemService=systemService, + ) + log_worker_status.now(id=id) + + @staticmethod + def init_run_worker_optimization_job(workerJob: WorkerJob): + db = create_engine(Env.DB_URL) + + # REPOS + optimizationRepo = OptimizationSqlRepo(db=db) + optimizationResultRepo = OptimizationResultSqlRepo(db=db) + optimizationVehicleRepo = OptimizationVehicleSqlRepo(db=db) + workerJobStatusRepo = WorkerJobStatusSqlRepo(db=db) + workerJobLogRepo = WorkerJobLogSqlRepo(db=db) + workerLogRepo = WorkerLogSqlRepo(db=db) + postOfficeRepo = PostOfficeSqlRepo(db=db) + gpsStopTimeRepo = GpsStopTimeSqlRepo(db=db) + gpsServiceTimeRepo = GpsServiceTimeSqlRepo(db=db) + optimizationMetricsRepo = OptimizationMetricsSqlRepo(db=db) + gpsSessionRepo = GpsSessionSqlRepo(db=db) + + # SERVICES + ftpService = FsFtpService() + systemService = PsutilSystemService() + routingService = OsrmRoutingService(domain=Env.ROUTING_DOMAIN_OSRM) + optimizationService = SolvesallOptimizationService() + postaService = PostaApiService( + useMocks=Env.POSTA_API_USE_MOCKS, + authDomain=Env.POSTA_API_AUTH, + crnDomain=Env.POSTA_API_CRN, + deliveryDomain=Env.POSTA_API_DELIVERY, + username=Env.POSTA_API_USERNAME, + password=Env.POSTA_API_PASSWORD, + authToken=Env.POSTA_API_AUTH_TOKEN, + crnToken=Env.POSTA_API_CRN_TOKEN, + deliveryToken=Env.POSTA_API_DELIVERY_TOKEN, + ) + + + # USE CASES + log_worker_job_status = Log_worker_job_status( + workerJobStatusRepo=workerJobStatusRepo, + systemService=systemService, + ) + + update_service_times = Update_service_times( + gpsStopTimeRepo=gpsStopTimeRepo, + gpsServiceTimeRepo=gpsServiceTimeRepo, + postaService=postaService, + gpsSessionRepo=gpsSessionRepo, + ) + + read_optimization_files = Read_optimization_files(ftpService=ftpService) + + update_optimization_points = Update_optimization_points( + gpsStopTimeRepo=gpsStopTimeRepo, + gpsServiceTimeRepo=gpsServiceTimeRepo, + update_service_times=update_service_times + ) + + run_worker_optimization_job = Run_optimization_job( + optimizationMetricsRepo=optimizationMetricsRepo, + systemService=systemService, + workerLogRepo=workerLogRepo, + workerJobLogRepo=workerJobLogRepo, + log_worker_job_status=log_worker_job_status, + optimizationVehicleRepo=optimizationVehicleRepo, + optimizationRepo=optimizationRepo, + postOfficeRepo=postOfficeRepo, + read_optimization_files=read_optimization_files, + optimizationResultRepo=optimizationResultRepo, + optimizationService=optimizationService, + routingService=routingService, + update_optimization_points=update_optimization_points + ) + + run_worker_optimization_job.now(workerJob=workerJob) + + @staticmethod + def init(maxCrnPoints: Optional[int] = None): + db = create_engine(Env.DB_URL) + + # REPOS + App.repos.optimizationRepo = OptimizationSqlRepo(db=db) + App.repos.optimizationResultRepo = OptimizationResultSqlRepo(db=db) + App.repos.optimizationVehicleRepo = OptimizationVehicleSqlRepo(db=db) + App.repos.workerRepo = WorkerSqlRepo(db=db) + App.repos.workerLogRepo = WorkerLogSqlRepo(db=db) + App.repos.workerStatusRepo = 
WorkerStatusSqlRepo(db=db) + App.repos.workerJobRepo = WorkerJobSqlRepo(db=db) + App.repos.workerJobStatusRepo = WorkerJobStatusSqlRepo(db=db) + App.repos.workerJobLogRepo = WorkerJobLogSqlRepo(db=db) + App.repos.gpsSessionRepo = GpsSessionSqlRepo(db=db) + App.repos.gpsRepo = GpsSqlRepo(db=db) + App.repos.gpsStopTimeRepo = GpsStopTimeSqlRepo(db=db) + App.repos.gpsServiceTimeRepo = GpsServiceTimeSqlRepo(db=db) + App.repos.postOfficeRepo = PostOfficeSqlRepo(db=db) + App.repos.crnMicroUpdateRepo = CrnMicroUpdateSqlRepo(db=db) + App.repos.gpsDeliveryRepo = GpsDeliverySqlRepo(db=db) + + # SERVICES + App.services.routingService = OsrmRoutingService(domain=Env.ROUTING_DOMAIN_OSRM) + App.services.systemService = PsutilSystemService() + App.services.optimizationService = SolvesallOptimizationService() + App.services.landService = EProstorLandService() + App.services.postaService = PostaApiService( + useMocks=Env.POSTA_API_USE_MOCKS, + authDomain=Env.POSTA_API_AUTH, + crnDomain=Env.POSTA_API_CRN, + deliveryDomain=Env.POSTA_API_DELIVERY, + username=Env.POSTA_API_USERNAME, + password=Env.POSTA_API_PASSWORD, + authToken=Env.POSTA_API_AUTH_TOKEN, + crnToken=Env.POSTA_API_CRN_TOKEN, + maxCrnPoints=maxCrnPoints, + deliveryToken=Env.POSTA_API_DELIVERY_TOKEN, + ) + App.services.ftpService = FsFtpService() + + # USE CASES + App.usecases.print_file_hashes = Print_file_hashes() + + App.usecases.log_worker_job_status = Log_worker_job_status( + workerJobStatusRepo=App.repos.workerJobStatusRepo, + systemService=App.services.systemService, + ) + + App.usecases.update_optimization_points = Update_optimization_points( + gpsStopTimeRepo=App.repos.gpsStopTimeRepo, + gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo, + update_service_times=App.usecases.update_service_times + ) + + App.usecases.log_worker_status = Log_worker_status( + workerRepo=App.repos.workerRepo, + workerStatusRepo=App.repos.workerStatusRepo, + systemService=App.services.systemService + ) + App.usecases.register_worker = Register_worker( + workerRepo=App.repos.workerRepo, + workerStatusRepo=App.repos.workerStatusRepo, + workerLogRepo=App.repos.workerLogRepo, + systemService=App.services.systemService, + ) + App.usecases.run_optimization_worker = Run_optimization_worker( + register_worker=App.usecases.register_worker, + workerLogRepo=App.repos.workerLogRepo, + optimizationRepo=App.repos.optimizationRepo, + workerJobRepo=App.repos.workerJobRepo, + init_run_optimization_job=App.usecases.init_run_worker_optimization_job, + init_log_worker_status=App.usecases.init_log_worker_status, + systemService=App.services.systemService, + workerRepo=App.repos.workerRepo + ) + + App.usecases.read_optimization_files = Read_optimization_files(ftpService=App.services.ftpService) + App.usecases.calculate_optimization_points = Calculate_optimization_points( + gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo, + crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo, + ) + App.usecases.test_transport_matrix = Test_transport_matrix(routingService=App.services.routingService) + App.usecases.match_crnPoints_with_landPlots = Match_crnPoints_with_landPlots( + landService=App.services.landService, + postaService=App.services.postaService, + ) + App.usecases.match_crnPoints_with_allLandPlots = Match_crnPoints_with_allLandPlots( + landService=App.services.landService, + postaService=App.services.postaService, + ) + App.usecases.write_optimization_files = Write_optimization_files( + postOfficeRepo=App.repos.postOfficeRepo, + crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo, + 
postaService=App.services.postaService, + ftpService=App.services.ftpService, + routingService=App.services.routingService, + calculate_optimization_points=App.usecases.calculate_optimization_points, + match_crnPoints_with_landPlots=App.usecases.match_crnPoints_with_landPlots + ) + App.usecases.run_updating_worker = Run_updating_worker( + register_worker=App.usecases.register_worker, + workerLogRepo=App.repos.workerLogRepo, + postOfficeRepo=App.repos.postOfficeRepo, + write_optimization_files=App.usecases.write_optimization_files, + gpsSessionRepo=App.repos.gpsSessionRepo, + init_log_worker_status=App.usecases.init_log_worker_status, + ftpService=App.services.ftpService, + ) + + App.usecases.analyze_gps_data = Analyze_gps_data(gpsRepo=App.repos.gpsRepo, postaService=App.services.postaService) + App.usecases.analyze_delivery_data = Analyze_delivery_data(postaService=App.services.postaService) + + App.usecases.update_service_times = Update_service_times( + gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo, + gpsStopTimeRepo=App.repos.gpsStopTimeRepo, + postaService=App.services.postaService, + gpsSessionRepo=App.repos.gpsSessionRepo + ) + + App.usecases.run_gps_worker = Run_gps_worker( + init_log_worker_status=App.usecases.init_log_worker_status, + register_worker=App.usecases.register_worker, + workerLogRepo=App.repos.workerLogRepo, + analyze_gps_data=App.usecases.analyze_gps_data, + gpsSessionRepo=App.repos.gpsSessionRepo, + gpsStopTimeRepo=App.repos.gpsStopTimeRepo, + crnMicroUpdateRepo=App.repos.crnMicroUpdateRepo, + update_service_times=App.usecases.update_service_times, + analyze_delivery_data=App.usecases.analyze_delivery_data, + gpsDeliveryRepo=App.repos.gpsDeliveryRepo, + gpsServiceTimeRepo=App.repos.gpsServiceTimeRepo, + match_crn_points_with_land_plots=App.usecases.match_crnPoints_with_landPlots, + postaService=App.services.postaService + ) + + App.__check() + + @staticmethod + def __check(): + not_inited = [] + for group in [App.repos, App.services, App.usecases]: + for key, value in group.__dict__.items(): + if not str(key).startswith("_") and key not in ['init']: + if value is None: + not_inited.append(key) + + if len(not_inited) > 0: + raise Exception(f"App dependencies not inited: {not_inited}") diff --git a/admiral-worker/app/Env.py b/admiral-worker/app/Env.py new file mode 100644 index 0000000..59c484e --- /dev/null +++ b/admiral-worker/app/Env.py @@ -0,0 +1,22 @@ +import os + +from dotenv import load_dotenv + +load_dotenv() + + +class Env: + + DB_URL: str = os.getenv("DB_URL") + + POSTA_API_USE_MOCKS: bool = bool(int(os.getenv("POSTA_API_USE_MOCKS"))) + POSTA_API_AUTH: str = os.getenv("POSTA_API_AUTH") + POSTA_API_CRN: str = os.getenv("POSTA_API_CRN") + POSTA_API_DELIVERY: str = os.getenv("POSTA_API_DELIVERY") + POSTA_API_USERNAME: str = os.getenv("POSTA_API_USERNAME") + POSTA_API_PASSWORD: str = os.getenv("POSTA_API_PASSWORD") + POSTA_API_AUTH_TOKEN: str = os.getenv("POSTA_API_AUTH_TOKEN") + POSTA_API_CRN_TOKEN: str = os.getenv("POSTA_API_CRN_TOKEN") + POSTA_API_DELIVERY_TOKEN: str = os.getenv("POSTA_API_DELIVERY_TOKEN") + + ROUTING_DOMAIN_OSRM: str = os.getenv("ROUTING_DOMAIN_OSRM") diff --git a/admiral-worker/app/algorithms/OrToolsOptimizationService.py b/admiral-worker/app/algorithms/OrToolsOptimizationService.py new file mode 100644 index 0000000..5606bc0 --- /dev/null +++ b/admiral-worker/app/algorithms/OrToolsOptimizationService.py @@ -0,0 +1,260 @@ +import json +import logging +from datetime import timedelta +from typing import Literal, Optional + +import numpy as np 
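As a side note on configuration: the Env class above reads every setting from environment variables loaded via python-dotenv, and boolean flags such as POSTA_API_USE_MOCKS are parsed with bool(int(...)), so they must be exported as 0 or 1. A minimal sketch of that convention follows, using a hypothetical getenv_bool helper that is not part of this patch and only illustrates the expected format:

import os

def getenv_bool(name: str) -> bool:
    # Hypothetical helper (illustration only): mirrors Env's bool(int(os.getenv(...)))
    # convention, but fails with a readable message when the variable is missing,
    # whereas int(None) would raise a bare TypeError.
    raw = os.getenv(name)
    if raw is None:
        raise RuntimeError(f"missing required environment variable: {name}")
    return bool(int(raw))  # "0" -> False, "1" -> True

# usage, assuming the variable is set in .env or the shell:
# use_mocks = getenv_bool("POSTA_API_USE_MOCKS")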
+import pandas as pd +from pydantic import BaseModel, PositiveInt, PositiveFloat, NonNegativeInt, model_validator, ConfigDict +from pydantic.alias_generators import to_camel +from typing_extensions import Self +from typing_extensions import override + +from app.algorithms import solver_or +from core.types.Logger import Logger + + +class BaseSchema(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, populate_by_name=True, extra="allow" + ) + + +class OrToolsOptimizationVehicle(BaseSchema): + id: NonNegativeInt + name: str + + route_type: str + capacity: PositiveInt + range_km: PositiveFloat + working_time_h: Optional[PositiveFloat] = 8.0 + districts: list[str] + priority: Optional[bool] = False + + @model_validator(mode="after") + def check_values(self) -> Self: + # assert 0 < self.range_km <= 1_000, f"Range should be between 0 and 1000 km." + # assert 0 < self.capacity <= 1_000 + # assert 0 < self.working_time_h <= 10, f"Max working time is 10h." + return self + + +class OrToolsOptimizationPoint(BaseSchema): + id: NonNegativeInt + hisa_id: str + + service_time_sec: NonNegativeInt + demand: Optional[NonNegativeInt] = 1 + freq: Optional[float] = 1.0 + type: Literal['crn', 'depot', 'refill'] + + lat: float + lon: float + + district: Optional[str] = None + + @model_validator(mode="after") + def check_values(self) -> Self: + # TODO: assert 0 <= self.service_time_sec <= 1200, f"Service time too large: {self.service_time_sec}." + assert 0 <= self.demand < 1_000, f"Demand too large {self.demand}" + assert 0 <= self.freq <= 1, f"Frequency not between 0 and 1.0 {self.freq}" + assert self.type != 'depot' or self.district is None, "Depot can't have an assigned district." + assert 45 <= self.lat <= 47 and 13 <= self.lon <= 17, f"Invalid coordinates {self.lat}, {self.lon}" + + return self + + +def to_np_array_int(df, col): + df = df.sort_values('start_hisa') + n = len(df['start_hisa'].unique()) + dm = np.full((n, n), 10 ** 9, dtype=int) + dm[df['start_index'], df['end_index']] = df[col] + return dm + + +class OrToolsOptimizationInstance(BaseSchema): + class Config: + arbitrary_types_allowed = True + + class JsonEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, OrToolsOptimizationInstance): + return obj.model_dump() + if isinstance(obj, np.ndarray): + all = {} + for y, line in enumerate(obj.tolist()): + for x, ele in enumerate(line): + all[f"{y}_{x}"] = ele + return all + if isinstance(obj, dict): + return str(obj) + return super().default(obj) + + vehicles: list[OrToolsOptimizationVehicle] + points: list[OrToolsOptimizationPoint] + distance_matrix: dict[str, np.ndarray] + time_matrix: dict[str, np.ndarray] + initial_routes: Optional[list[list[NonNegativeInt]]] + district_percentage: Optional[float] = 0.0 + log: Logger + + # time_dist_data: dict[Literal['bike', 'car', 'foot'], pd.DataFrame] + + # @computed_field(return_type=dict[Literal['bike', 'car', 'foot'], np.ndarray]) + # @cached_property + # def distance_matrix(self): + # return {key: to_np_array_int(df, 'distance') for key, df in self.time_dist_data.items()} + + # @computed_field(return_type=dict[Literal['bike', 'car', 'foot'], np.ndarray]) + # @cached_property + # def time_matrix(self): + # return {key: to_np_array_int(df, 'duration') for key, df in self.time_dist_data.items()} + + @model_validator(mode="after") + def check_values(self) -> Self: + availableCapacity = sum([o.capacity for o in self.vehicles]) + requiredCapacity = sum([o.demand for o in self.points]) + assert availableCapacity >= 
requiredCapacity, f"Available capacity '{availableCapacity}' is less than required capacity '{requiredCapacity}'" + + if self.district_percentage is not None: + assert 0.0 <= self.district_percentage <= 1.0, f"District percentage has to be between 0 and 1 (float)." + + # for k, df in self.time_dist_data.items(): + # assert set(df['start_index']) == set(df['end_index']), "Sources and destinations should be the same." + + for k, v in self.distance_matrix.items(): + assert len(self.points) == v.shape[0], f"Number of points ({len(self.points)}) should be the same as distance_matrix size ({v.shape[0]})" + assert v.shape[0] == v.shape[1], "Both dimensions of distance_matrix should be of equal size" + assert all(np.array(sorted([x.id for x in self.points])) == np.arange(v.shape[0])), "Point.id should be its index in distance_matrix." + + for k, v in self.time_matrix.items(): + assert len(self.points) == v.shape[0], "Number of points should be the same as time_matrix size" + assert v.shape[0] == v.shape[1], "Both dimensions of time_matrix should be of equal size" + + assert all(np.issubdtype(v.dtype, np.floating) for v in self.distance_matrix.values()), "Distance matrix should be of type np.integer" + assert all(np.issubdtype(v.dtype, np.floating) for v in self.time_matrix.values()), "Time matrix should be of type np.integer" + + for k, v in self.distance_matrix.items(): + # assert v.max() <= 100_000, f"Some values in distance_matrix '{k}' are larger than 100 km." + # assert v.mean() >= 1_000, f"Mean of values in distance_matrix '{k}' is smaller than 1000 m. Check why the values are so small!" + if v.max() > 100_000: + self.log.warning(f"Some values in distance_matrix '{k}' are too big: {v.max()}") + if v.mean() < 1_000: + self.log.warning(f"Mean of values in distance_matrix '{k}' is too small: {v.mean()}") + + # TODO: check matrix + return self + + +class OrToolsOptimizationSolution(BaseSchema): + vehicle_id: NonNegativeInt + dummy: bool + hisa_ids: list[str] + distance: NonNegativeInt + duration: timedelta + cost: NonNegativeInt + district: Optional[str] = None # TODO: solver_or needs to assign district names when doing exact optimization!!!
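All of these request and solution models inherit from BaseSchema, defined at the top of this file, whose alias_generator=to_camel together with populate_by_name=True lets the worker validate camelCase JSON while keeping snake_case attribute names in Python. A minimal standalone sketch of that Pydantic v2 pattern (the two fields are borrowed from OrToolsOptimizationVehicle purely for illustration):

from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel

class CamelSchema(BaseModel):
    # same aliasing configuration as BaseSchema above: camelCase aliases, snake_case attributes
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

    route_type: str
    working_time_h: float

# camelCase keys, as they would arrive in JSON ...
v = CamelSchema.model_validate({"routeType": "bike", "workingTimeH": 7.5})
# ... or snake_case keyword arguments from Python callers
w = CamelSchema(route_type="car", working_time_h=8.0)

print(v.route_type)                 # bike
print(w.model_dump(by_alias=True))  # {'routeType': 'car', 'workingTimeH': 8.0}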
+ + +class OrToolsOptimizationConfig(BaseSchema): + objective: Literal['distance', 'time'] = 'time' + vehicle_cost: Optional[int] = solver_or.VEHICLE_COST + + district_penalty: NonNegativeInt = 0 + district_mode: Literal['single', 'subsets', 'hard'] = 'soft' + + set_initial: bool = False + + useDistrictCentrality: bool = True + + +class OrToolsOptimizationService: + """ + Main class for doing optimization + """ + + @override + def vrpOptimization( + self, + solving_time_sec: int, + instance: OrToolsOptimizationInstance, + config: OrToolsOptimizationConfig, + log: Logger, + solution_callback_fn=lambda objective, raw_solution, overlapping: None, + stop_callback_fn=lambda: False, + ) -> tuple[int, list[OrToolsOptimizationSolution], dict[int, float]]: + + log.info("Mapping optimizationVehicles") + opta_vehicles = pd.DataFrame([x.__dict__ for x in instance.vehicles]) + opta_vehicles['cost'] = config.vehicle_cost + opta_vehicles.loc[opta_vehicles['priority'], 'cost'] = solver_or.VEHICLE_PRIORITY_COST + opta_vehicles['max_time'] = (opta_vehicles['working_time_h'] * 3600).astype(int) + opta_vehicles['range'] = (opta_vehicles['range_km'] * 1000).astype(int) + + if solver_or.VEHICLE_DUPLICATE_FACTOR > 1: + vn = len(opta_vehicles) + opta_vehicles = pd.concat([opta_vehicles] * solver_or.VEHICLE_DUPLICATE_FACTOR).reset_index(drop=True) + opta_vehicles.loc[opta_vehicles.index[vn:], 'cost'] = solver_or.VEHICLE_DUPLICATE_COST + log.info("Mapping optimization points") + opta_points = pd.DataFrame([x.__dict__ for x in instance.points]) + opta_points['service_time'] = opta_points['service_time_sec'] + opta_points['base_point'] = np.arange(len(opta_points)) + + opta_instance = solver_or.VrpInstance(opta_vehicles, opta_points, instance.distance_matrix, instance.time_matrix, instance.initial_routes, + instance.district_percentage) + + def calculate_overlapping(solution, instance) -> Optional[dict[int, float]]: + if not config.set_initial: + return None + overlapping: dict[int, float] = {} + for id, vehicle, type, route, total_distance, total_time, total_cost, num_points, orig_id, dummy in solution.to_records(index=False): + if len(instance.initial_routes) == orig_id: + break + initial_route = set(instance.initial_routes[orig_id]) + route = set(route) + addedPoints = route - initial_route + if len(initial_route) > 0: + overlapping[int(vehicle)] = round(100 * len(addedPoints) / len(initial_route), 3) + return overlapping + + def map_raw_solution(objective: int, solution: pd.DataFrame) -> tuple[int, list[OrToolsOptimizationSolution], dict[int, float]]: + solution = solution[solution['total_distance'] > 0].copy() + + solution['orig_id'] = solution['vehicle'].apply(lambda x: x % len(instance.vehicles)) + solution = solution.reset_index() + solution['dummy'] = solution['orig_id'].duplicated() + + readings = [kwargs for kwargs in solution.to_dict(orient='records')] + + optimizationSolutions = [] + id2point = {x.id: x for x in instance.points} + + for reading in readings: + hisa_ids = [id2point[i].hisa_id for i in reading['route']] + vehicle_id = reading['orig_id'] + dummy = reading['dummy'] + + optimizationSolutions.append( + OrToolsOptimizationSolution( + vehicle_id=vehicle_id, + dummy=dummy, + hisa_ids=hisa_ids, + distance=reading['total_distance'], + duration=timedelta(seconds=reading['total_time']), + cost=reading['total_cost'] + ) + ) + + return objective, optimizationSolutions, calculate_overlapping(solution=solution, instance=instance) + + log.info(f"Solving VRP with points (without depot): 
{len(opta_instance.nodes) - 1}") + objective, solution = solver_or.solve( + opta_instance, config, solving_time_sec, + solution_callback_fn=lambda objec, raw_solution: solution_callback_fn(*map_raw_solution(objective=objec, solution=raw_solution)), + stop_callback_fn=stop_callback_fn, + log=log + ) + + obj, sol, overlap = map_raw_solution(objective=objective, solution=solution) + log.info(f"VRP solved with points (without depot): {sum([len(s.hisa_ids) - 1 for s in sol])}") + return obj, sol, overlap diff --git a/admiral-worker/app/algorithms/solver_or.py b/admiral-worker/app/algorithms/solver_or.py new file mode 100644 index 0000000..20ff503 --- /dev/null +++ b/admiral-worker/app/algorithms/solver_or.py @@ -0,0 +1,554 @@ +import math +import sys +import weakref +from collections import defaultdict +from dataclasses import dataclass +from functools import partial +from threading import Timer +from typing import Callable + +import pandas as pd +from ortools.constraint_solver import pywrapcp, routing_enums_pb2 +from pandas import DataFrame + +from core.types.Logger import Logger + +VEHICLE_COST = 16 * 3600 # Two working days. +VEHICLE_PRIORITY_COST = 0 # Vehicle with priority has zero cost. +VEHICLE_DUPLICATE_COST = 100_000_000 +VEHICLE_DUPLICATE_FACTOR = 2 +""" + id name route_type ... cost max_time range +0 0 Kolo z pomožnim motorjem kpm ... 57600 3600 60000 +1 1 Motorno kolo mk ... 0 3600 120000 +2 2 Kolo z motorjem km ... 57600 3600 120000 +3 3 Kolo bike ... 57600 3600 30000 +4 4 Elektricni tro/štiri kolesnik ev ... 57600 3600 120000 +5 5 Pes foot ... 57600 3600 6000 +6 6 Avtomobil car ... 57600 3600 150000 +7 0 Kolo z pomožnim motorjem kpm ... 100000000 3600 60000 +8 1 Motorno kolo mk ... 100000000 3600 120000 +9 2 Kolo z motorjem km ... 100000000 3600 120000 +10 3 Kolo bike ... 100000000 3600 30000 +11 4 Elektricni tro/štiri kolesnik ev ... 100000000 3600 120000 +12 5 Pes foot ... 100000000 3600 6000 +13 6 Avtomobil car ... 
100000000 3600 150000 +""" + + +@dataclass +class VrpInstance: + """ + Main "Instance" of the data to optimize + """ + vehicles: pd.DataFrame + nodes: pd.DataFrame + dist: dict + time: dict + initial_routes: list[list[int]] + district_percentage: float + +def read_solution( + manager: pywrapcp.RoutingIndexManager, + routing: pywrapcp.RoutingModel, + instance: VrpInstance, + distance_evaluators: dict[callable], + time_evaluators: dict[callable], +): + routes = [] + + for vehicle_id, route_type in enumerate(instance.vehicles["route_type"]): + distance_evaluator = distance_evaluators[route_type] + time_evaluator = time_evaluators[route_type] + + points = [] + route_distance = 0 + route_time = 0 + route_cost = 0 + + index = routing.Start(vehicle_id) + while not routing.IsEnd(index): + previous_index = index + index = routing.NextVar(index).Value() + + route_distance += distance_evaluator(previous_index, index) + route_time += time_evaluator(previous_index, index) + route_cost += routing.GetArcCostForVehicle(previous_index, index, vehicle_id) + + node = manager.IndexToNode(index) + point = instance.nodes.base_point.iloc[node] + points.append(point) + + routes.append( + { + "vehicle": vehicle_id, + "type": instance.vehicles.iloc[vehicle_id]["route_type"], + "route": points, + "total_distance": route_distance, + "total_time": route_time, + "total_cost": route_cost, + "num_points": len(points), + } + ) + + routes = pd.DataFrame(routes) + return routes + + +class RepeatTimer(Timer): + def run(self): + while not self.finished.wait(self.interval): + self.function() + + +class SolutionCallback: + def __init__( + self, + manager: pywrapcp.RoutingIndexManager, + model: pywrapcp.RoutingModel, + instance: VrpInstance, + distance_evaluators: dict[callable], + time_evaluators: dict[callable], + solution_callback_fn: Callable[[int, pd.DataFrame], None], + stop_callback_fn: callable + ): + self._routing_manager_ref = weakref.ref(manager) + self._routing_model_ref = weakref.ref(model) + self.objectives = [] + + self.instance = instance + self.distance_evaluators = distance_evaluators + self.time_evaluators = time_evaluators + + self.best_routes = None + + self.solution_callback_fn = solution_callback_fn + self.stop_callback_fn = stop_callback_fn + + self._timer = RepeatTimer(10, self._check_terminated) + self._timer.start() + + def __call__(self): + # current objective value + objective = int(self._routing_model_ref().CostVar().Value()) + if not self.objectives or objective < self.objectives[-1]: + self.objectives.append(objective) + + self.best_routes = read_solution( + self._routing_manager_ref(), self._routing_model_ref(), self.instance, self.distance_evaluators, self.time_evaluators + ) + + tmp = self.best_routes + tmp = tmp[tmp["num_points"] > 2] + + vpd = defaultdict(set) + districts = self.instance.nodes['district'].values + for _, row in tmp.iterrows(): + for p in row['route']: + vpd[districts[p]].add(row['vehicle']) + + self.solution_callback_fn(objective, self.best_routes) + + # Num. clean districts: {sum(len(s) == 1 for s in vpd.values())} / {len(vpd.keys())} ") + # log.info(f"Objective: {objective} Num. 
vehicles: {len(tmp)}") + + # self._routing_model_ref().solver().FinishCurrentSearch() + + def _check_terminated(self): + """ + if self.stop_callback_fn(None): + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + TypeError: SolvesallOptimizationService.vrpOptimization..stop_callback_fn() takes 0 positional arguments but 1 was given + """ + if self.stop_callback_fn(): + self._timer.cancel() + self._routing_model_ref().solver().FinishCurrentSearch() + + +def solve(instance: VrpInstance, config, time_limit_sec, solution_callback_fn: Callable[[int, pd.DataFrame], None], stop_callback_fn, log: Logger, log_search=False): + # with open(f"solve_args_{datetime.now().isoformat()}.pkl", "wb") as f: + # pickle.dump((instance, config), f) + sys.stdout.flush() + assert config.objective in ['distance', 'time'] + assert instance.nodes.iloc[0]["type"] == "depot", "Depot is expected to be at 0" + + manager = pywrapcp.RoutingIndexManager( + len(instance.nodes), len(instance.vehicles), 0 + ) + routing = pywrapcp.RoutingModel(manager) + + def create_distance_evaluator(route_type, instance): + dist_mat = instance.dist[route_type] + base_point = instance.nodes["base_point"].values + freq = instance.nodes['freq'].values + + def distance_evaluator(from_node, to_node): + dst_node = manager.IndexToNode(to_node) + src = base_point[manager.IndexToNode(from_node)] + dst = base_point[manager.IndexToNode(to_node)] + return round(dist_mat[src, dst]) + + return distance_evaluator + + distance_evaluators, distance_evaluators_index = {}, {} + for route_type in instance.vehicles["route_type"].unique(): + distance_evaluators[route_type] = create_distance_evaluator(route_type, instance) + distance_evaluators_index[route_type] = routing.RegisterTransitCallback( + distance_evaluators[route_type] + ) + + def create_time_evaluator(route_type, instance): + dist_mat = instance.dist[route_type] + time_mat = instance.time[route_type] + + base_point = instance.nodes["base_point"].values + service_time = instance.nodes["service_time"].values + freq = instance.nodes['freq'].values + hisa_ids = instance.nodes['hisa_id'].values + + def time_evaluator(from_node, to_node): + src_node = manager.IndexToNode(from_node) + dst_node = manager.IndexToNode(to_node) + src = base_point[manager.IndexToNode(from_node)] + dst = base_point[manager.IndexToNode(to_node)] + src_hisa_id = hisa_ids[src] + dst_hisa_id = hisa_ids[dst] + + # THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!! 
+ time = round(time_mat[src, dst] + freq[src_node] * service_time[src_node]) + # log.info(f"({src} -> {dst} [{src_hisa_id} -> {dst_hisa_id}] [distance={dist_mat[src, dst]} time={time_mat[src, dst]} freq={freq[src_node]} service_time={service_time[src_node]}] = {time}") + return time + + return time_evaluator + + time_evaluators, time_evaluators_index = {}, {} + for route_type in instance.vehicles["route_type"].unique(): + time_evaluators[route_type] = create_time_evaluator(route_type, instance) + time_evaluators_index[route_type] = routing.RegisterTransitCallback( + time_evaluators[route_type] + ) + + def create_demand_evaluator(instance): + demands = instance.nodes["demand"].values + + def demand_evaluator(from_node): + return int(demands[manager.IndexToNode(from_node)]) + + return demand_evaluator + + demand_evaluator = create_demand_evaluator(instance) + demand_evaluator_index = routing.RegisterUnaryTransitCallback(demand_evaluator) + + routing.AddDimensionWithVehicleTransitAndCapacity( + [ + distance_evaluators_index[route_type] + for route_type in instance.vehicles["route_type"] + ], + 0, + [1000000] * len(instance.vehicles), + # [int(x) for x in instance.vehicles["range"]] if not config.set_initial else [1000000] * len(instance.vehicles), + True, + "Distance", + ) + """ + With initial solution we must be aware that is in the feasable space. + If it is not in the feasable space the solver can fail because it does not find an initial solution. + That's why we will increase the vehicle time constraint to 10 hours, and create a soft penalty. + On initial routes max_time constraint on vehicle is overacheived. + """ + routing.AddDimensionWithVehicleTransitAndCapacity( + [ + time_evaluators_index[route_type] + for route_type in instance.vehicles["route_type"] + ], + 0, + [int(x) for x in instance.vehicles["max_time"]] if not config.set_initial else [1000 * 3600] * len(instance.vehicles), + True, + "Time", + ) + + + + routing.AddConstantDimension(1, len(instance.nodes), True, "Count") + + count_dimension = routing.GetDimensionOrDie("Count") + for vehicle_id in range(len(instance.vehicles)): + if instance.vehicles.iloc[vehicle_id]['cost'] == 0: + index_end = routing.End(vehicle_id) + count_dimension.SetCumulVarSoftLowerBound(index_end, 3, 1_000_000_000) + routing.SetVehicleUsedWhenEmpty(True, vehicle_id) + + if config.set_initial: + time_dimension = routing.GetDimensionOrDie('Time') + for vehicle_id in range(len(instance.vehicles)): + index = routing.End(vehicle_id) + max_time = int(instance.vehicles.iloc[vehicle_id]['max_time']) + time_dimension.SetCumulVarSoftUpperBound(index, max_time, 1_000) + + routing.AddDimensionWithVehicleCapacity( + demand_evaluator_index, + 0, + [1000000] * len(instance.vehicles), + # [int(x) for x in instance.vehicles["capacity"]], + True, + "Capacity", + ) + + # District matching + if config.set_initial: + log.info("District matching ..") + node_to_vehicle = {} + district_size = {} + for v, route in enumerate(instance.initial_routes): + for n in route: + node_to_vehicle[n] = v + district_size[v] = len(route) + + def district_added_callback(vehicle_id, from_index): + from_node = manager.IndexToNode(from_index) + + if from_node == 0: # If node == 0, then it is depo. + return 1 + # Check if node not belongs to vehicle's initial district + return 1 if vehicle_id != node_to_vehicle[from_node] else 0 + + def district_required_callback(vehicle_id, from_index): + from_node = manager.IndexToNode(from_index) + + if from_node == 0: # If node == 0, then it is depo. 
+ return 1 + # Check if node belongs to vehicle's initial district + return 1 if vehicle_id == node_to_vehicle[from_node] else 0 + + routing.AddDimensionWithVehicleTransitAndCapacity( + [routing.RegisterUnaryTransitCallback(partial(district_added_callback, vehicle_id)) + for vehicle_id in range(len(instance.vehicles)) + ], + 0, + [len(instance.nodes)] * len(instance.vehicles), + True, + "District_added", + ) + + routing.AddDimensionWithVehicleTransitAndCapacity( + [routing.RegisterUnaryTransitCallback(partial(district_required_callback, vehicle_id)) + for vehicle_id in range(len(instance.vehicles)) + ], + 0, + [len(instance.nodes)] * len(instance.vehicles), + True, + "District_required", + ) + + district_added_dimension = routing.GetDimensionOrDie('District_added') + district_required_dimension = routing.GetDimensionOrDie('District_required') + + # Add soft lower bound for each vehicle + for vehicle_id in range(len(instance.vehicles)): + if vehicle_id not in district_size: + continue + # len(IR) * (1 - 0.8 (GASPER)) + added_visits = int(district_size[vehicle_id] * (1 - instance.district_percentage)) # 80 % of district size + index = routing.End(vehicle_id) + district_added_dimension.SetCumulVarSoftUpperBound(index, added_visits, 10_000) + district_required_dimension.SetCumulVarSoftLowerBound(index, 3, 10_000) # District must contains 3 initial points + + # One vehicle per street (or district) + + # if config.district_mode == 'single' and config.district_penalty > 0: + # for _, ids in instance.nodes.groupby('district')['id']: + # ids = [manager.NodeToIndex(x) for x in ids.values] + # assert 0 not in ids, "Depot can't have an assigned district." + # routing.AddSoftSameVehicleConstraint(ids, config.district_penalty) + # elif config.district_mode == 'subsets' and config.district_penalty > 0: + # for _, ids in instance.nodes.groupby('district')['id']: + # ids = [manager.NodeToIndex(x) for x in ids.values] + # assert 0 not in ids, "Depot can't have an assigned district." + # log.info("Building pairwise constraints ...", end="") + ## sys.stdout.flush() + # combs = list(itertools.combinations(ids, 2))[:40] + # combs.append(ids) + # for subset in combs: + # routing.AddSoftSameVehicleConstraint(subset, config.district_penalty) + # log.info("finished") + # elif config.district_mode == 'hard': + # solver = routing.solver() + # for _, ids in instance.nodes.groupby('district')['id']: + # ids = [manager.NodeToIndex(x) for x in ids.values] + # + # v0 = routing.VehicleVar(ids[0]) + # for i in ids[1:]: + # solver.Add(v0 == routing.VehicleVar(i)) + def create_objective_evaluator(route_type, instance): + dist_mat = instance.dist[route_type] + time_mat = instance.time[route_type] + + base_point = instance.nodes["base_point"].values + service_time = instance.nodes["service_time"].values + freq = instance.nodes['freq'].values + hisa_ids = instance.nodes['hisa_id'].values + + def objective_evaluator(from_node, to_node): + src_node = manager.IndexToNode(from_node) + dst_node = manager.IndexToNode(to_node) + src = base_point[manager.IndexToNode(from_node)] + dst = base_point[manager.IndexToNode(to_node)] + src_hisa_id = hisa_ids[src] + dst_hisa_id = hisa_ids[dst] + + # THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!! 
+ if dist_mat[src, dst] > 3000: + penalty = dist_mat[src, dst] + else: + distance = dist_mat[src, dst] + max_distance_sqrt = math.sqrt(3000) + penalty = (distance / max_distance_sqrt) ** 2 + if config.useDistrictCentrality: + total_cost = round(time_mat[src, dst] + freq[src_node] * service_time[src_node] + penalty) + else: + total_cost = round(time_mat[src, dst] + freq[src_node] * service_time[src_node]) + # log.info(f"({src} -> {dst} [{src_hisa_id} -> {dst_hisa_id}] [distance={dist_mat[src, dst]} time={time_mat[src, dst]} freq={freq[src_node]} service_time={service_time[src_node]}] = {time}") + return total_cost + + return objective_evaluator + + objective_evaluators, objective_evaluators_index = {}, {} + for route_type in instance.vehicles["route_type"].unique(): + objective_evaluators[route_type] = create_objective_evaluator(route_type, instance) + objective_evaluators_index[route_type] = routing.RegisterTransitCallback( + objective_evaluators[route_type] + ) + # Objective + if config.objective == 'distance': + obj_evaluators_index = distance_evaluators_index + obj_dimension = routing.GetDimensionOrDie('Distance') + elif config.objective == 'time': + obj_evaluators_index = time_evaluators_index + obj_dimension = routing.GetDimensionOrDie('Time') + obj_evaluators_index = objective_evaluators_index + # sum of distances (or travel times) + for i, route_type in enumerate(instance.vehicles["route_type"]): + routing.SetArcCostEvaluatorOfVehicle(obj_evaluators_index[route_type], i) + + # diff between max and min distance (or travel time) + # obj_dimension.SetGlobalSpanCostCoefficient(100) + + # cost per each used vehicle + for i, cost in enumerate(instance.vehicles["cost"]): + routing.SetFixedCostOfVehicle(int(cost), i) + + solution_callback = SolutionCallback(manager, routing, instance, distance_evaluators, time_evaluators, solution_callback_fn, stop_callback_fn) + routing.AddAtSolutionCallback(solution_callback) + + search_parameters = pywrapcp.DefaultRoutingSearchParameters() + search_parameters.first_solution_strategy = ( + routing_enums_pb2.FirstSolutionStrategy.LOCAL_CHEAPEST_COST_INSERTION + ) + search_parameters.local_search_metaheuristic = ( + routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH + ) + search_parameters.time_limit.FromSeconds(time_limit_sec) + search_parameters.log_search = log_search + + if config.set_initial: + log.info("Initial solution added.") + routing.CloseModelWithParameters(search_parameters) + initial_solution = routing.ReadAssignmentFromRoutes(instance.initial_routes, True) + assert initial_solution is not None, "Initial solution is not feasible." + log.info("Initial solution found!") + + solution = routing.SolveFromAssignmentWithParameters( + initial_solution, search_parameters + ) + else: + solution = routing.SolveWithParameters(search_parameters) + + # Stop callback timer sice we dont need it anymore + solution_callback._timer.cancel() + + assert solution, "No solution found." 
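For readers unfamiliar with the OR-Tools routing API, solve() above follows the library's standard recipe: build a RoutingIndexManager and RoutingModel, register transit callbacks, pick a first-solution strategy plus a local-search metaheuristic under a time limit, call SolveWithParameters, and walk NextVar to read routes back (as read_solution() does). A stripped-down, self-contained sketch of that recipe on a toy four-node instance; the distance matrix and parameter choices are made up for illustration and are not the project's configuration (the production code uses LOCAL_CHEAPEST_COST_INSERTION as its first-solution strategy, PATH_CHEAPEST_ARC is used here only to keep the toy simple):

from ortools.constraint_solver import pywrapcp, routing_enums_pb2

# toy symmetric distance matrix, one vehicle, depot at node 0
distance = [
    [0, 10, 15, 20],
    [10, 0, 35, 25],
    [15, 35, 0, 30],
    [20, 25, 30, 0],
]

manager = pywrapcp.RoutingIndexManager(len(distance), 1, 0)
routing = pywrapcp.RoutingModel(manager)

def transit(from_index: int, to_index: int) -> int:
    # OR-Tools passes solver indices; map them back to node ids first
    return distance[manager.IndexToNode(from_index)][manager.IndexToNode(to_index)]

transit_idx = routing.RegisterTransitCallback(transit)
routing.SetArcCostEvaluatorOfAllVehicles(transit_idx)

params = pywrapcp.DefaultRoutingSearchParameters()
params.first_solution_strategy = routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC
params.local_search_metaheuristic = routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH
params.time_limit.FromSeconds(2)

solution = routing.SolveWithParameters(params)
assert solution is not None, "No solution found."

# read the single route back the same way read_solution() above does
index = routing.Start(0)
route = []
while not routing.IsEnd(index):
    route.append(manager.IndexToNode(index))
    index = solution.Value(routing.NextVar(index))
route.append(manager.IndexToNode(index))
print(route, solution.ObjectiveValue())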
+ if log_search: + debug_solution(instance.vehicles, instance.nodes, manager, routing, solution, log) + obj, sol = solution.ObjectiveValue(), solution_callback.best_routes + if config.set_initial: + debug_solution_overrlapping(instance.initial_routes, sol, log) + return obj, sol + + +def debug_solution(vehicles, points, manager, routing, solution, log: Logger): + objectiveValue: float = solution.ObjectiveValue() + distanceDimension = routing.GetMutableDimension("Distance") + timeDimension = routing.GetMutableDimension("Time") + + log.info(f"Objective value: {objectiveValue}") + + total_time = 0 + total_distance = 0 + total_cost = 0 + for vehicle_idx in range(len(vehicles)): + # add first node + index = routing.Start(vehicle_idx) + node = manager.IndexToNode(index) + point = points.iloc[node].to_dict() + + log.info(f"Route for vehicle {vehicle_idx} = {vehicles.iloc[vehicle_idx].to_dict()}:") + route_time = 0 + route_distance = 0 + route_cost = 0 + start = True + + while not routing.IsEnd(index): + # log.info(f"\t{node} = {point}") + + # Previous info + ctime = solution.Value(timeDimension.CumulVar(index)) + cdistance = solution.Value(distanceDimension.CumulVar(index)) + + # Next index + previous_index = index + index = solution.Value(routing.NextVar(index)) + + # Next info + ntime = solution.Value(timeDimension.CumulVar(index)) + ndistance = solution.Value(distanceDimension.CumulVar(index)) + + time = ntime - ctime + distance = ndistance - cdistance + cost = routing.GetArcCostForVehicle(previous_index, index, vehicle_idx) + if start: + log.info(f"STARTING COST: {cost}") + start = False + + # log.info(f"\tCurrent time: {round(time / 3600, 3)}h") + # log.info(f"\tCurrent distance: {round(distance, 3)}m") + # log.info(f"\tCurrent cost: {round(cost / 3600, 3)}\n") + + route_time += time + route_distance += distance + route_cost += cost + + node = manager.IndexToNode(index) + point = points.iloc[node].to_dict() + + # log.info(f"\t{node} = {point}") + log.info(f"Route time: {round(route_time / 3600, 3)}h") + log.info(f"Route distance: {round(route_distance, 3)}m") + log.info(f"Route cost: {round(route_cost, 3)}\n") + + total_time += route_time + total_distance += route_distance + total_cost += route_cost + + log.info(f"\nAll routes time: {round(total_time / 3600, 3)}h") + log.info(f"All routes distance: {round(total_distance, 3)}m") + log.info(f"All routes cost: {round(total_cost, 3)}") + +def debug_solution_overrlapping(initial_routes: list[list[int]], solution: DataFrame, log: Logger): + for id, vehicle, type, route, total_distance, total_time, total_cost, num_points in solution.to_records(): + if len(initial_routes) == id: + break + initial_route = set(initial_routes[id]) + route = set(route) + crosSection = initial_route.intersection(route) + if len(initial_route) > 0: + log.info(f"Vehicle {id}. 
overlappings: {round(100 * len(crosSection) / len(initial_route), 1)}%") diff --git a/admiral-worker/app/repos/sql/OptimizationMetricsSqlRepo.py b/admiral-worker/app/repos/sql/OptimizationMetricsSqlRepo.py new file mode 100644 index 0000000..1459384 --- /dev/null +++ b/admiral-worker/app/repos/sql/OptimizationMetricsSqlRepo.py @@ -0,0 +1,71 @@ +from dataclasses import dataclass + +from sqlalchemy import Engine, BLOB +from sqlalchemy import PrimaryKeyConstraint +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self, Optional + +from app.repos.sql import dbRetry +from core import Utils +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationMetrics import OptimizationMetrics +from core.repos.OptimizationMetricsRepo import OptimizationMetricsRepo +from core.types.Id import Id + + +@dataclass +class OptimizationMetricsSqlRepo(OptimizationMetricsRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "optimization_metrics" + + __table_args__ = (PrimaryKeyConstraint("optimization_id", "solution", "created_at"),) + + optimization_id: str = Field(foreign_key="optimization.id") + solution: int + vehicles: int + cost: float + distance: float + duration: float + created_at: int + overlapping: str = Field(sa_type=BLOB) + + @classmethod + def toRow(cls, obj: OptimizationMetrics) -> Self: + return cls( + optimization_id=obj.optimizationId.value, + solution=obj.solution, + cost=obj.cost, + vehicles=obj.vehicles, + distance=obj.distance, + duration=obj.duration, + created_at=obj.createdAt.timestamp(), + overlapping=Utils.json_dumps(obj.overlapping).encode('ascii') if obj.overlapping is not None else None, + ) + + @override + def getAll(self) -> list[OptimizationMetrics]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[OptimizationMetrics]) -> Optional[OptimizationMetrics]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationMetrics]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(optimization_id=optimizationId.value) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + @dbRetry + def post(self, optimizationMetrics: OptimizationMetrics): + with Session(self.db) as conn: + conn.add(self.Table.toRow(optimizationMetrics)) + conn.commit() diff --git a/admiral-worker/app/repos/sql/OptimizationResultSqlRepo.py b/admiral-worker/app/repos/sql/OptimizationResultSqlRepo.py new file mode 100644 index 0000000..e3dc57d --- /dev/null +++ b/admiral-worker/app/repos/sql/OptimizationResultSqlRepo.py @@ -0,0 +1,117 @@ +import json +import uuid +from dataclasses import dataclass + +from sqlalchemy import Engine, BLOB, text +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self, Optional + +from app.repos.sql import dbRetry +from core import Utils +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationResult import OptimizationResult +from core.domain.optimization.OptimizationRoute import OptimizationRoute +from core.repos.OptimizationResultRepo import 
OptimizationResultRepo +from core.types.Id import Id + + +@dataclass +class OptimizationResultSqlRepo(OptimizationResultRepo): + + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "optimization_result" + + id: str = Field(primary_key=True) + optimization_id: str = Field(foreign_key="optimization.id") + routes: str = Field(sa_type=BLOB) + unvisited: str = Field(sa_type=BLOB) + created_at: int + info: str + authorized_by_user_id: str + parent: str + + def toDomain(self) -> OptimizationResult: + routes = [OptimizationRoute.fromJson(**x) for x in json.loads(self.routes.decode('utf-8'))] + unvisited = [OptimizationPoint.fromJson(**x) for x in json.loads(self.unvisited.decode('utf-8'))] if self.unvisited is not None else [] + return OptimizationResult( + optimizationId=Id(value=uuid.UUID(self.optimization_id)), + routes=routes, + unvisited=unvisited, + createdAt=self.created_at, + info=self.info, + authorizedByUserId=self.authorized_by_user_id, + parent=Id(value=uuid.UUID(self.parent)) if self.parent is not None else None, + id=Id(value=uuid.UUID(self.id)), + ) + + @classmethod + def toRow(cls, obj: OptimizationResult) -> Self: + return cls( + optimization_id=obj.optimizationId.value, + routes=Utils.json_dumps(obj.routes).encode('ascii'), + unvisited=Utils.json_dumps(obj.unvisited).encode('ascii') if obj.unvisited is not None else None, + created_at=obj.createdAt, + info=obj.info, + authorized_by_user_id=obj.authorizedByUserId, + parent=obj.parent.value if obj.parent is not None else None, + id=obj.id.value, + ) + + @override + def getAll(self) -> list[OptimizationResult]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[OptimizationResult]) -> Optional[OptimizationResult]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationResult]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(optimization_id=optimizationId.value) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + @dbRetry + def post(self, optimizationResult: OptimizationResult) -> OptimizationResult: + with Session(self.db) as conn: + conn.merge(self.Table.toRow(optimizationResult)) + conn.commit() + return optimizationResult + + @override + def getLatestByOptimizationId(self, optimizationId: Id[Optimization]) -> Optional[OptimizationResult]: + with Session(self.db) as conn: + query = select( + self.Table + ).order_by( + self.Table.created_at.desc() + ).limit( + 1 + ).filter_by(optimization_id=optimizationId.value) + + row = conn.exec(query).one_or_none() + if row is None: + return None + + return row.toDomain() + + @override + def getAllIds(self) -> list[Id[OptimizationResult]]: + query = text(f""" + select optimization_result.id from optimization_result + join optimization o on o.id = optimization_result.optimization_id + where state not in ('TEST', 'DELETED') + """) + + with (Session(self.db) as conn): + results = conn.exec(query).all() + return [Id(value=row[0]) for row in results] diff --git a/admiral-worker/app/repos/sql/OptimizationSqlRepo.py b/admiral-worker/app/repos/sql/OptimizationSqlRepo.py new file mode 100644 index 0000000..d08afc2 --- /dev/null +++ b/admiral-worker/app/repos/sql/OptimizationSqlRepo.py @@ -0,0 +1,139 @@ +from 
datetime import datetime, timedelta +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlalchemy import Engine, update +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override + +from app.repos.sql import dbRetry +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.optimization.OptimizationType import OptimizationType +from core.repos.OptimizationRepo import OptimizationRepo +from core.types.Id import Id +from core.types.IntId import IntId + + +@dataclass +class OptimizationSqlRepo(OptimizationRepo): + + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "optimization" + + id: str = Field(primary_key=True) + posta: int + title: str + description: str + optimization_time: float + weight: int + dates: str + created_at: int + authorized_by_user_id: str + state_changed_at: int + use_frequency: bool + use_unvisited_crn: bool + district_centering: bool + static_service_times: int + state: str + type: str + parent: str + + def toDomain(self) -> Optimization: + return Optimization( + posta=IntId(value=self.posta), + title=self.title, + description=self.description, + weight=self.weight, + dates=[datetime.fromtimestamp(int(date)).date() for date in self.dates.split(",") if date.isnumeric()], + optimizationTime=timedelta(seconds=self.optimization_time), + createdAt=datetime.fromtimestamp(self.created_at), + authorizedByUserId=self.authorized_by_user_id, + state=OptimizationState(self.state), + type=OptimizationType(self.type), + useFrequency=self.use_frequency, + useUnvisitedCrn=self.use_unvisited_crn, + useDistrictCentrality=self.district_centering, + stateChangedAt=datetime.fromtimestamp(self.state_changed_at), + staticServiceTimes=self.static_service_times, + parent=Id(value=uuid.UUID(self.parent)) if self.parent is not None else None, + id=Id(value=uuid.UUID(self.id)), + ) + + @override + def getAll(self) -> list[Optimization]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[Optimization]) -> Optional[Optimization]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + def getWithState(self, state: OptimizationState) -> list[Optimization]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(state=state.value) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + @dbRetry + def updateFirst(self, fromState: OptimizationState, toState: OptimizationState) -> Optional[Optimization]: + with Session(self.db) as conn: + + # Get candidate for update + selectQuery = select(self.Table).filter_by(state=fromState.value).limit(1) + row = conn.exec(selectQuery).one_or_none() + if row is None: + return None + + # Update candidate but only if his state is still unchanged + updateQuery = update(self.Table).filter_by(state=fromState.value, id=row.id).values(state=toState.value, + state_changed_at=datetime.now().timestamp()) + + # If candidate was updated before this update break the transaction + if conn.exec(updateQuery).rowcount != 1: + return None + + # Again get updated candidate + selectQuery = select(self.Table).filter_by(id=row.id).limit(1) + row = conn.exec(selectQuery).one_or_none() + if row is None: + return None + + # Commit 
changes + conn.commit() + return row.toDomain() + + @override + @dbRetry + def setState(self, id: Id[Optimization], toState: OptimizationState) -> Optional[Optimization]: + with Session(self.db) as conn: + updateQuery = update(self.Table).filter_by(id=id.value).values(state=toState.value, state_changed_at=datetime.now().timestamp()) + conn.exec(updateQuery) + conn.commit() + + @override + def getLatestConfirmedByPosta(self, posta: int) -> Optional[Optimization]: + with (Session(self.db) as conn): + query = select( + self.Table + ).order_by( + self.Table.state_changed_at.desc() + ).limit( + 1 + ).filter_by( + posta=posta, state=OptimizationState.CONFIRMED.value + ) + + row = conn.exec(query).one_or_none() + if row is None: + return None + + return row.toDomain() diff --git a/admiral-worker/app/repos/sql/OptimizationVehicleSqlRepo.py b/admiral-worker/app/repos/sql/OptimizationVehicleSqlRepo.py new file mode 100644 index 0000000..a70ce41 --- /dev/null +++ b/admiral-worker/app/repos/sql/OptimizationVehicleSqlRepo.py @@ -0,0 +1,70 @@ +import json +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlalchemy import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.domain.optimization.TransportMode import TransportMode +from core.repos.OptimizationVehicleRepo import OptimizationVehicleRepo +from core.types.Id import Id + + +@dataclass +class OptimizationVehicleSqlRepo(OptimizationVehicleRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "optimization_vehicle" + + id: str = Field(primary_key=True) + optimization_id: str = Field(foreign_key="optimization.id") + name: str + type: str + capacity: int + range: float # kilometers + min_quantity: int + max_quantity: int + delivery_time: float # hours + average_speed: float + max_speed: float + districts: str + + def toDomain(self) -> OptimizationVehicle: + return OptimizationVehicle( + optimizationId=Id(value=uuid.UUID(self.optimization_id)), + name=self.name, + type=TransportMode(self.type), + capacity=self.capacity, + range=self.range, + minQuantity=self.min_quantity, + maxQuantity=self.max_quantity, + deliveryTime=self.delivery_time, + averageSpeed=self.average_speed, + maxSpeed=self.max_speed, + districts=self.districts, + id=Id(value=uuid.UUID(self.id)) + ) + + @override + def getAll(self) -> list[OptimizationVehicle]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[OptimizationVehicle]) -> Optional[OptimizationVehicle]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationVehicle]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(optimization_id=optimizationId.value) + return [row.toDomain() for row in conn.exec(query).all()] diff --git a/admiral-worker/app/repos/sql/WorkerJobLogSqlRepo.py b/admiral-worker/app/repos/sql/WorkerJobLogSqlRepo.py new file mode 100644 index 0000000..2550d6c --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerJobLogSqlRepo.py @@ -0,0 +1,73 @@ +import uuid +from dataclasses import dataclass +from 
typing import Optional + +from sqlalchemy import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self + +from app.repos.sql import dbRetry +from core.domain.worker.WorkerJob import WorkerJob +from core.domain.worker.WorkerLog import WorkerLog +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.repos.WorkerJobLogRepo import WorkerJobLogRepo +from core.types.Id import Id + + +@dataclass +class WorkerJobLogSqlRepo(WorkerJobLogRepo): + + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker_job_log" + + id: str = Field(primary_key=True) + context: str + data: str + worker_job_id: str = Field(foreign_key="worker_job.id") + created_at: float + level: str + + def toDomain(self) -> WorkerLog: + return WorkerLog( + context=self.context, + data=self.data, + ownerId=Id(value=uuid.UUID(self.worker_job_id)), + createdAt=self.created_at, + level=WorkerLogLevel(self.level), + id=Id(value=uuid.UUID(self.id)) + ) + + @classmethod + def toRow(cls, obj: WorkerLog) -> Self: + return cls( + context=obj.context, + data=obj.data, + worker_job_id=obj.ownerId.value, + created_at=obj.createdAt, + level=obj.level.value, + id=obj.id.value, + ) + + @override + def getAll(self) -> list[WorkerLog]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + @dbRetry + def post(self, context: str, workerJobId: Id[WorkerJob], data: str, level: WorkerLogLevel) -> WorkerLog: + obj = WorkerLog(context=context, data=data, ownerId=workerJobId, level=level) + with Session(self.db) as conn: + conn.add(self.Table.toRow(obj)) + conn.commit() + return obj diff --git a/admiral-worker/app/repos/sql/WorkerJobSqlRepo.py b/admiral-worker/app/repos/sql/WorkerJobSqlRepo.py new file mode 100644 index 0000000..d0a3be5 --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerJobSqlRepo.py @@ -0,0 +1,67 @@ +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self +from sqlalchemy.engine import Engine + +from app.repos.sql import dbRetry +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.worker.WorkerJob import WorkerJob +from core.repos.WorkerJobRepo import WorkerJobRepo +from core.types.Id import Id + + +@dataclass +class WorkerJobSqlRepo(WorkerJobRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker_job" + + id: str = Field(primary_key=True) + optimization_id: str = Field(foreign_key="optimization.id") + worker_id: str = Field(foreign_key="worker.id") + name: str + state: str + + def toDomain(self) -> WorkerJob: + return WorkerJob( + id=Id(value=uuid.UUID(self.id)), + optimizationId=Id(value=uuid.UUID(self.optimization_id)), + workerId=Id(value=uuid.UUID(self.worker_id)), + name=self.name, + state=OptimizationState(self.state) + ) + + @classmethod + def toRow(cls, obj: WorkerJob) -> Self: + return cls( + id=obj.id.value, + optimization_id=obj.optimizationId.value, + worker_id=obj.workerId.value, + name=obj.name, + state=obj.state.value, + ) + + @override + def getAll(self) -> list[WorkerJob]: + with Session(self.db) as conn: + query = 
select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[WorkerJob]) -> Optional[WorkerJob]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + @dbRetry + def post(self, obj: WorkerJob): + with Session(self.db) as conn: + conn.add(self.Table.toRow(obj)) + conn.commit() + return obj diff --git a/admiral-worker/app/repos/sql/WorkerJobStatusSqlRepo.py b/admiral-worker/app/repos/sql/WorkerJobStatusSqlRepo.py new file mode 100644 index 0000000..8a96ac1 --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerJobStatusSqlRepo.py @@ -0,0 +1,68 @@ +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlalchemy.engine import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self + +from app.repos.sql import dbRetry +from core.domain.worker.WorkerJobStatus import WorkerJobStatus +from core.domain.worker.WorkerJob import WorkerJob +from core.repos.WorkerJobStatusRepo import WorkerJobStatusRepo +from core.types.Id import Id + + +@dataclass +class WorkerJobStatusSqlRepo(WorkerJobStatusRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker_job_status" + + id: str = Field(primary_key=True) + worker_job_id: str = Field(foreign_key="worker_job.id") + ram_taken: float + cpu_utilization: float + created_at: int + + def toDomain(self) -> WorkerJobStatus: + return WorkerJobStatus( + ownerId=Id(value=uuid.UUID(self.worker_job_id)), + ramTaken=self.ram_taken, + cpuUtilization=self.cpu_utilization, + createdAt=self.created_at, + id=Id(value=uuid.UUID(self.id)) + ) + + @classmethod + def toRow(cls, obj: WorkerJobStatus) -> Self: + return cls( + worker_job_id=obj.ownerId.value, + ram_taken=obj.ramTaken, + cpu_utilization=obj.cpuUtilization, + created_at=obj.createdAt, + id=obj.id.value + ) + + @override + def getAll(self) -> list[WorkerJobStatus]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[WorkerJobStatus]) -> Optional[WorkerJobStatus]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + @dbRetry + def post(self, workerJobId: Id[WorkerJob], ramTaken: float, cpuUtilization: float) -> WorkerJobStatus: + obj = WorkerJobStatus(ownerId=workerJobId, ramTaken=ramTaken, cpuUtilization=cpuUtilization) + with Session(self.db) as conn: + conn.add(self.Table.toRow(obj)) + conn.commit() + return obj diff --git a/admiral-worker/app/repos/sql/WorkerLogSqlRepo.py b/admiral-worker/app/repos/sql/WorkerLogSqlRepo.py new file mode 100644 index 0000000..b5d9c3c --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerLogSqlRepo.py @@ -0,0 +1,72 @@ +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlalchemy.engine import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self + +from app.repos.sql import dbRetry +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerLog import WorkerLog +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.repos.WorkerLogRepo import WorkerLogRepo +from core.types.Id import Id + + +@dataclass +class 
WorkerLogSqlRepo(WorkerLogRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker_log" + + id: str = Field(primary_key=True) + context: str + data: str + worker_id: str = Field(foreign_key="worker.id") + level: str + created_at: float + + def toDomain(self) -> WorkerLog: + return WorkerLog( + context=self.context, + data=self.data, + ownerId=Id(value=uuid.UUID(self.worker_id)), + createdAt=self.created_at, + level=WorkerLogLevel(self.level), + id=Id(value=uuid.UUID(self.id)) + ) + + @classmethod + def toRow(cls, obj: WorkerLog) -> Self: + return cls( + context=obj.context, + data=obj.data, + worker_id=obj.ownerId.value, + created_at=obj.createdAt, + level=obj.level.value, + id=obj.id.value, + ) + + @override + def getAll(self) -> list[WorkerLog]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + @dbRetry + def post(self, context: str, workerId: Id[Worker], data: str, level: WorkerLogLevel) -> WorkerLog: + obj = WorkerLog(context=context, data=data, ownerId=workerId, level=level) + with Session(self.db) as conn: + conn.add(self.Table.toRow(obj)) + conn.commit() + return obj diff --git a/admiral-worker/app/repos/sql/WorkerSqlRepo.py b/admiral-worker/app/repos/sql/WorkerSqlRepo.py new file mode 100644 index 0000000..e482d55 --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerSqlRepo.py @@ -0,0 +1,83 @@ +import uuid +from dataclasses import dataclass +from typing import Optional + +from sqlalchemy import delete +from sqlalchemy.engine import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self + +from app.repos.sql import dbRetry +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerState import WorkerState +from core.domain.worker.WorkerType import WorkerType +from core.repos.WorkerRepo import WorkerRepo +from core.types.Id import Id + + +@dataclass +class WorkerSqlRepo(WorkerRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker" + + id: str = Field(primary_key=True) + type: str + ip: str + state: str + + def toDomain(self) -> Worker: + return Worker( + ip=self.ip, + type=WorkerType(self.type), + state=WorkerState(self.state), + id=Id(value=uuid.UUID(self.id)) + ) + + @classmethod + def toRow(cls, obj: Worker) -> Self: + return cls( + ip=obj.ip, + type=obj.type, + id=obj.id.value, + state=obj.state.value + ) + + @override + def getAll(self) -> list[Worker]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[Worker]) -> Optional[Worker]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + def post(self, ip: str, type: WorkerType) -> Worker: + worker = Worker(ip=ip, type=type, state=WorkerState.NORMAL) + with Session(self.db) as conn: + conn.add(self.Table.toRow(worker)) + conn.commit() + return worker + + @override + def getByIp(self, ip: str, type: WorkerType) -> Optional[Worker]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(ip=ip, type=type) + row = 
conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @override + @dbRetry + def deleteByIp(self, ip: str, type: WorkerType) -> int: + with Session(self.db) as conn: + query = delete(self.Table).filter_by(ip=ip, type=type) + result = conn.exec(query).rowcount + conn.commit() + + return result diff --git a/admiral-worker/app/repos/sql/WorkerStatusSqlRepo.py b/admiral-worker/app/repos/sql/WorkerStatusSqlRepo.py new file mode 100644 index 0000000..2d77e6d --- /dev/null +++ b/admiral-worker/app/repos/sql/WorkerStatusSqlRepo.py @@ -0,0 +1,69 @@ +import uuid +from dataclasses import dataclass +from typing import Optional +from typing_extensions import Self + +from sqlalchemy.engine import Engine +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override + +from app.repos.sql import dbRetry +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerStatus import WorkerStatus +from core.repos.WorkerStatusRepo import WorkerStatusRepo +from core.types.Id import Id + + +@dataclass +class WorkerStatusSqlRepo(WorkerStatusRepo): + db: Engine + + class Table(SQLModel, table=True): + __tablename__ = "worker_status" + + id: str = Field(primary_key=True) + worker_id: str = Field(foreign_key="worker.id") + ram_available: float + cpu_utilization: float + created_at: int + + def toDomain(self) -> WorkerStatus: + return WorkerStatus( + ownerId=Id(value=uuid.UUID(self.worker_id)), + ramAvailable=self.ram_available, + cpuUtilization=self.cpu_utilization, + createdAt=self.created_at, + id=Id(value=uuid.UUID(self.id)) + ) + + @classmethod + def toRow(cls, obj: WorkerStatus) -> Self: + return cls( + worker_id=obj.ownerId.value, + ram_available=obj.ramAvailable, + cpu_utilization=obj.cpuUtilization, + created_at=obj.createdAt, + id=obj.id.value + ) + + @override + def getAll(self) -> list[WorkerStatus]: + with Session(self.db) as conn: + query = select(self.Table) + return [row.toDomain() for row in conn.exec(query).all()] + + @override + def get(self, id: Id[WorkerStatus]) -> Optional[WorkerStatus]: + with Session(self.db) as conn: + query = select(self.Table).filter_by(id=id.value) + row = conn.exec(query).one_or_none() + return row.toDomain() if row is not None else None + + @dbRetry + @override + def post(self, workerId: Id[Worker], ramAvailable: float, cpuUtilization: float) -> WorkerStatus: + workerStatus = WorkerStatus(ownerId=workerId, ramAvailable=ramAvailable, cpuUtilization=cpuUtilization) + with Session(self.db) as conn: + conn.add(self.Table.toRow(workerStatus)) + conn.commit() + return workerStatus diff --git a/admiral-worker/app/repos/sql/__init__.py b/admiral-worker/app/repos/sql/__init__.py new file mode 100644 index 0000000..5975f6b --- /dev/null +++ b/admiral-worker/app/repos/sql/__init__.py @@ -0,0 +1,21 @@ +import logging +from collections.abc import Callable +from functools import wraps +from time import sleep +from sqlalchemy.exc import DBAPIError + +log = logging.getLogger(__name__) + + +def dbRetry(func: Callable, max_retries=60, delay=5, exceptions=(DBAPIError,)): + @wraps(func) + def wrapper(*args, **kwargs): + for i in range(max_retries): + try: + return func(*args, **kwargs) + except exceptions as e: + log.warning(f"DB function {func.__name__} attempt {i + 1} failed with: {e}") + sleep(delay) + raise Exception("DB operation retry limit reached!") + + return wrapper diff --git a/admiral-worker/app/services/FsFtpService.py b/admiral-worker/app/services/FsFtpService.py new file mode 100644 index 
0000000..1ecbc80 --- /dev/null +++ b/admiral-worker/app/services/FsFtpService.py @@ -0,0 +1,52 @@ +import logging +import os +from dataclasses import dataclass +from pathlib import Path +import shutil +from typing_extensions import override + +from core.extend import fs +from core.services.FtpService import FtpService + +log = logging.getLogger(__name__) + + +@dataclass +class FsFtpService(FtpService): + + @override + def download(self, path: Path): + ftpPath = fs.getFtpPath(path.name) + log.info(f"Download: '{path.name}' to '{path}'") + shutil.copyfile(src=ftpPath, dst=path) + + @override + def upload(self, path: Path): + ftpPath = fs.getFtpPath(path.name) + log.info(f"Upload: '{path}' to '{path.name}'") + shutil.copyfile(src=path, dst=ftpPath) + + @override + def rename(self, oldPath: Path, newPath: Path): + newFtpPath = fs.getFtpPath(newPath.name) + oldFtpPath = fs.getFtpPath(oldPath.name) + log.info(f"Rename: '{oldPath.name}' to '{newPath.name}'") + shutil.move(src=oldFtpPath, dst=newFtpPath) + + @override + def delete(self, path: Path): + ftpPath = fs.getFtpPath(path.name) + log.info(f"Delete: '{path.name}'") + ftpPath.unlink(missing_ok=True) + + @override + def copy(self, path: Path, newPath: Path): + newFtpPath = fs.getFtpPath(newPath.name) + oldFtpPath = fs.getFtpPath(path.name) + log.info(f"Copy: '{path.name}' to '{newPath.name}'") + shutil.copyfile(src=oldFtpPath, dst=newFtpPath) + + @override + def scan(self) -> list[Path]: + ftpPath = fs.getFtpPath() + return list(ftpPath.iterdir()) diff --git a/admiral-worker/app/services/FtputilFtpService.py b/admiral-worker/app/services/FtputilFtpService.py new file mode 100644 index 0000000..2561703 --- /dev/null +++ b/admiral-worker/app/services/FtputilFtpService.py @@ -0,0 +1,77 @@ +import logging +import time +from dataclasses import dataclass +from pathlib import Path + +from ftputil import FTPHost +from typing_extensions import override + +from core.services.FtpService import FtpService + +log = logging.getLogger(__name__) + + +@dataclass +class FtputilFtpService(FtpService): + + domain: str + username: str + password: str + port: str + + class Progress: + def __init__(self, size: int): + self.allBytes: int = size + self.transferedBytes: int = 0 + self.startTime = time.time() + self.i = 0 + + def __call__(self, chunk: bytes): + self.i += 1 + + chunkBytes = len(chunk) + self.transferedBytes += chunkBytes + + if self.i % 10 == 0: + duration = time.time() - self.startTime + progress = self.transferedBytes / self.allBytes * 100 + numChunksLeft = (self.allBytes - self.transferedBytes) / chunkBytes + timeLeft = (duration * numChunksLeft) / 60 + log.info(", ".join([ + f"FTP progress: {round(progress, 1)}%", + f"Transferred: ({round(self.transferedBytes / 1e6, 1)} / {round(self.allBytes / 1e6, 1)}) MB", + f"Time left: {round(timeLeft, 2)} minutes" + ])) + + self.startTime = time.time() + + def __post_init__(self): + self.ftp = FTPHost(self.domain, self.username, self.password, self.port) + + @override + def download(self, path: Path): + log.info(f"Download: '{path.name}' to '{path}'") + # Download some files from the login directory. 
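        # ftputil's FTPHost.download calls the given callback once per transferred chunk
        # (each chunk passed as bytes), which is what Progress above relies on for its
        # percentage / time-left logging. A minimal standalone sketch, with an illustrative
        # host and file name (not part of this service):
        #
        #   from ftputil import FTPHost
        #   with FTPHost("ftp.example.com", "user", "password") as host:
        #       size = host.stat("data.zip").st_size
        #       host.download("data.zip", "data.zip",
        #                     callback=FtputilFtpService.Progress(size=size))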
+ self.ftp.download(source=path.name, target=path, callback=self.Progress(size=self.ftp.stat(path.name).st_size)) + + @override + def upload(self, path: Path): + log.info(f"Upload: '{path}' to '{path.name}'") + self.ftp.upload(source=path, target=path.name, callback=self.Progress(size=path.stat().st_size)) + + @override + def rename(self, oldPath: Path, newPath: Path): + log.info(f"Rename: '{oldPath.name}' to '{newPath.name}'") + self.ftp.rename(source=oldPath.name, target=newPath.name) + + @override + def delete(self, path: Path): + log.info(f"Delete: '{path.name}'") + self.ftp.remove(path=path.name) + + @override + def copy(self, path: Path, newPath: Path): + log.info(f"Copy: '{path.name}' to '{newPath.name}'") + with self.ftp.open(path.name, "rb") as source: + with self.ftp.open(newPath.name, "wb") as target: + self.ftp.copyfileobj(source=source, target=target) diff --git a/admiral-worker/app/services/OsrmRoutingService.py b/admiral-worker/app/services/OsrmRoutingService.py new file mode 100644 index 0000000..e9083fb --- /dev/null +++ b/admiral-worker/app/services/OsrmRoutingService.py @@ -0,0 +1,131 @@ +import logging +from random import random + +import numpy as np +import requests +from typing_extensions import override + +from core.domain.map.GeoLocation import GeoLocation +from core.domain.map.RouteInfo import RouteInfo +from core.domain.map.RouteMatrix import RouteMatrix +from core.domain.optimization.TransportMode import TransportMode +from core.services.RoutingService import RoutingService + +log = logging.getLogger(__name__) + + +class OsrmRoutingService(RoutingService): + + + def __init__(self, domain: str): + self.domain = domain + + def __getCoordinates(self, locations : list[GeoLocation]) -> str: + coordinates = [] + for location in locations: + coordinates.append(f"{location.lon},{location.lat}") + return ";".join(coordinates) + + @override + def getRouteMatrix(self, geoLocations: list[GeoLocation], transportMode: TransportMode) -> RouteMatrix: + coordinates = self.__getCoordinates(locations=[gl for gl in geoLocations]) + port, profile = self.__getProfile(transportMode=transportMode) + + res = requests.get(url=f"{self.domain}:{port}/table/v1/{profile}/{coordinates}", params=dict(annotations="distance,duration")) + + if res.status_code != 200: + raise Exception(f"OSRM routing engine failed to create matrix: {res.text}") + + matrixes = res.json() + + return RouteMatrix.init( + distances=np.matrix(matrixes['distances'], dtype=np.float32), + durations=np.matrix(matrixes['durations'], dtype=np.float32) + ) + + @override + def getRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation]) -> RouteInfo: + coordinates = self.__getCoordinates(locations=legs) + port, profile = self.__getProfile(transportMode=transportMode) + res = requests.get( + url=f"{self.domain}:{port}/route/v1/{profile}/{coordinates}", + params=dict( + geometries="geojson", + alternatives='false', + steps='true', + continue_straight='false', + ) + ) + if res.status_code != 200: + raise Exception(f"OSRM routing engine failed to find route: {res.text}") + + data = res.json() + route = data['routes'][0] + + steps = [legs[0]] + for i, leg in enumerate(route['legs']): + legSteps = [legs[i]] + for step in leg['steps']: + legSteps += [GeoLocation(lat=c[1], lon=c[0]) for c in step['geometry']['coordinates']] + legSteps.append(legs[i + 1]) + steps += legSteps + + return RouteInfo( + distance=route['distance'], + duration=route['duration'], + steps=steps + ) + + @override + def getAverageRouteInfo(self, 
transportMode: TransportMode, legs: list[GeoLocation], probability: list[float], iterations: int) -> RouteInfo: + averageDistance = 0 + averageDuration = 0 + for i in range(iterations): + randomLegs: list[GeoLocation] = [] + for leg in legs: + if random() < probability[legs.index(leg)]: + randomLegs.append(leg) + if len(randomLegs) < 2: + randomLegs = legs + routeInfo = self.getRouteInfo(transportMode=transportMode, legs=randomLegs) + averageDistance += routeInfo.distance + averageDuration += routeInfo.duration + + return RouteInfo( + distance=averageDistance / iterations, + duration=averageDuration / iterations, + steps=[] + ) + + def __getProfile(self, transportMode: TransportMode) -> tuple[int, str]: + match transportMode: + case TransportMode.BIKE: + return 5000, 'bike' + case TransportMode.CAR: + return 5001, 'car' + case TransportMode.EV: + return 5002, 'ev' + case TransportMode.KM: + return 5003, 'km' + case TransportMode.KPM: + return 5004, 'kpm' + case TransportMode.MK: + return 5005, 'mk' + case TransportMode.WALK: + return 5006, 'walk' + case _: + raise Exception(f"Mapping for transport mode does not exists: {transportMode.value}") + + def _getPolyline(self, transportMode: TransportMode, legs: list[GeoLocation]) -> str: + coordinates = ";".join([f"{l.lon},{l.lat}" for l in legs]) + port, profile = self.__getProfile(transportMode=transportMode) + res = requests.get( + url=f"{self.domain}:{port}/route/v1/{profile}/{coordinates}", + params=dict( + geometries="polyline", + alternatives='false', + steps='true', + continue_straight='false', + ) + ) + return res.json()['routes'][0]['geometry'] diff --git a/admiral-worker/app/services/PsutilSystemService.py b/admiral-worker/app/services/PsutilSystemService.py new file mode 100644 index 0000000..409bb58 --- /dev/null +++ b/admiral-worker/app/services/PsutilSystemService.py @@ -0,0 +1,59 @@ +import logging +from dataclasses import dataclass +from typing import Optional +from typing_extensions import override + +import psutil +import requests +from urllib3.exceptions import NameResolutionError + +from core.services.SystemService import SystemService + +log = logging.getLogger(__name__) + + +@dataclass +class PsutilSystemService(SystemService): + @override + def getMaxRamMbAvailable(self) -> float: + return psutil.virtual_memory().total / 10**6 + + @override + def getIp(self) -> str: + return requests.get('https://checkip.amazonaws.com').text.strip() + + @override + def getCpuUtilization(self) -> float: + return psutil.cpu_percent(interval=None) + + @override + def getRamMbAvailable(self) -> float: + return psutil.virtual_memory().available / 10**6 + + @override + def getCpuAvailable(self) -> int: + count = 0 + for utilization in psutil.cpu_percent(percpu=True): + if utilization < 25: + count += 1 + return count + + @override + def getProcessCpu(self, pid: int = None) -> Optional[float]: + proc = psutil.Process(pid=pid) + return proc.cpu_percent(interval=None) + + @override + def getProcessRam(self, pid: int = None) -> Optional[float]: + proc = psutil.Process(pid=pid) + return proc.memory_info().rss / 10**6 + + @override + def killProcess(self, pid: int = None): + proc = psutil.Process(pid=pid) + proc.kill() + + @override + def terminateProcess(self, pid: int = None): + proc = psutil.Process(pid=pid) + proc.terminate() diff --git a/admiral-worker/app/services/SolvesallOptimizationService.py b/admiral-worker/app/services/SolvesallOptimizationService.py new file mode 100644 index 0000000..c1046ef --- /dev/null +++ 
b/admiral-worker/app/services/SolvesallOptimizationService.py @@ -0,0 +1,550 @@ +from collections import Counter +from datetime import timedelta +from typing import Callable, Literal, Optional + +import numpy as np +from sklearn.neighbors import BallTree + +from app.algorithms.OrToolsOptimizationService import OrToolsOptimizationVehicle, OrToolsOptimizationPoint, OrToolsOptimizationInstance, \ + OrToolsOptimizationSolution, OrToolsOptimizationService, OrToolsOptimizationConfig +from core.Utils import percentage +from core.domain.map.CrnPoint import CrnPoint +from core.domain.map.GeoLocation import GeoLocation +from core.domain.map.RouteMatrix import RouteMatrix +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationPointType import OptimizationPointType +from core.domain.optimization.OptimizationResultData import OptimizationResultData +from core.domain.optimization.OptimizationSolution import OptimizationSolution +from core.domain.optimization.OptimizationType import OptimizationType +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.domain.optimization.TransportMode import TransportMode +from core.services.OptimizationService import OptimizationService +from core.types.Logger import Logger + + +class SolvesallOptimizationService(OptimizationService): + + def config(self, setInitial: bool, district_centering: bool) -> OrToolsOptimizationConfig: + return OrToolsOptimizationConfig( + district_mode='subsets', + district_penalty=0, + vehicle_cost=16 * 3600, # Two working days. + set_initial=setInitial, + useDistrictCentrality=district_centering, + ) + + def vrpOptimization( + self, + optimization: Optimization, + optimizationVehicles: list[OptimizationVehicle], + optimizationPoints: list[OptimizationPoint], + routeMatrices: dict[TransportMode, RouteMatrix], + solutionCallback: Callable[[int, list[OptimizationSolution], bool, list[OptimizationPoint], Optional[dict[int, float]]], None], + terminationCallback: Callable[[], bool], + log: Logger, + initialOptimizationResultData: Optional[OptimizationResultData] = None + ): + config = self.config(setInitial=initialOptimizationResultData is not None, district_centering=optimization.useDistrictCentrality) + crn_initialDistrict: dict[int, str] = {} + initialOptimizationPoints: list[OptimizationPoint] = [] + initialRoutePointBallTree: Optional[BallTree] = None + if config.set_initial: + log.info('Setting optimization mode to initial solution.') + log.info('Creating crn_initialDistrict map and initial optimization points ball tree.') + for initialRoute in initialOptimizationResultData.optimizationResult.routes: + for initialRoutePoint in initialRoute.points: + if initialRoutePoint.crnPoint.hisa != 0: + initialOptimizationPoints.append(initialRoutePoint) + crn_initialDistrict[initialRoutePoint.crnPoint.hisa] = initialRoute.name + initialRoutePointBallTree = BallTree([iop.crnPoint.location.ballVector for iop in initialOptimizationPoints], metric='haversine') + + log.info('Mapping optimization points') + orToolsOptimizationPoints: list[OrToolsOptimizationPoint] = [] + for i, point in enumerate(optimizationPoints): + # Construct OrToolsOptimizationPoint list + crnPoint = point.crnPoint + microLocation = crnPoint.microLocation + district = None if crnPoint.district == '' else crnPoint.district + orPoint = OrToolsOptimizationPoint( + id=i, + hisa_id=str(crnPoint.hisa), + 
service_time_sec=int(point.serviceTime.total_seconds()), + demand=point.demand, + freq=point.visitFrequency, + type=self.__crn_type(point.type), + lat=microLocation.lat, + lon=microLocation.lon, + district=district + ) + orToolsOptimizationPoints.append(orPoint) + + if crnPoint.hisa != 0: + # Insert additional crn points which does not exists in initial routes to crn_initialDistrict + initialDistrict = crn_initialDistrict.get(point.crnPoint.hisa, None) + if initialDistrict is None and config.set_initial: + ballVector = GeoLocation(lat=orPoint.lat, lon=orPoint.lon).ballVector + nearestInitialPointsIndex = initialRoutePointBallTree.query([ballVector], k=1, return_distance=False)[0][0] + nearestInitialCrn = initialOptimizationPoints[nearestInitialPointsIndex].crnPoint.hisa + nearestInitialDistrict = crn_initialDistrict[nearestInitialCrn] + crn_initialDistrict[crnPoint.hisa] = nearestInitialDistrict + log.warning(f"Crn point '{crnPoint.hisa}' is missing in initial routes, nearest crn district: {nearestInitialDistrict}") + + # Log first 10 points + if i < 10: + log.info(orPoint) + + log.info('Mapping optimization vehicles') + orToolsOptimizationVehicles: list[OrToolsOptimizationVehicle] = [] + optimizationVehicleAll: list[OptimizationVehicle] = [] + tempOrVehicleIndex_district: dict[int, str] = {} + orVehicleIndex_district: dict[int, str] = {} + for vehicle in optimizationVehicles: + districts = vehicle.districts.split(",") + for i in range(vehicle.maxQuantity): + orVehicle = OrToolsOptimizationVehicle( + id=len(orToolsOptimizationVehicles), + name=vehicle.name, + route_type=self.__route_type(vehicle.type), + capacity=vehicle.capacity, + range_km=vehicle.range / 1000, + working_time_h=vehicle.deliveryTime, + priority=i < vehicle.minQuantity, + districts=vehicle.districts.split(",") + ) + + # Assign district to vehicle + if len(districts) > 0: + district = districts.pop(0) + tempOrVehicleIndex_district[orVehicle.id] = district + orVehicleIndex_district[orVehicle.id] = district + + log.info(orVehicle) + optimizationVehicleAll.append(vehicle) + orToolsOptimizationVehicles.append(orVehicle) + + # TODO: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! On backend get initial district from crn if no confirmed optimization allready exists otherwise get initial district from last confirmed optimization. + initialRoutes: list[list[CrnPoint]] = [] + if config.set_initial: + log.info("Construct initial routes for vehicles") + for i, vehicle in enumerate(orToolsOptimizationVehicles): + vehicleDistrict = tempOrVehicleIndex_district.pop(i, None) + initialRoutes.append([]) + if vehicleDistrict is None: + continue + for route in initialOptimizationResultData.optimizationResult.routes: + if route.name == vehicleDistrict: + for routePoint in route.points: + if routePoint.crnPoint.hisa != 0: + initialRoutes[-1].append(routePoint.crnPoint) + if len(initialRoutes[-1]) > 0: + route = [ir.hisa for ir in initialRoutes[-1]] + log.info([ + f"{i}. 
{vehicleDistrict}.{vehicle.name}[{len(route)}]:", + f"{route[0]}", f"->", f"{route[-1]}", "...", f"{route}" + ]) + + log.info('Mapping optimization matrices') + time_matrix: dict[str, np.ndarray] = {} + distance_matrix: dict[str, np.ndarray] = {} + for vehicle_type, routeMatrix in routeMatrices.items(): + vehicle_type_name = self.__route_type(type=vehicle_type) + time_matrix[vehicle_type_name] = routeMatrix.durationMatrix() + distance_matrix[vehicle_type_name] = routeMatrix.distanceMatrix() + + # Creating configuration for optimization + orToolsInstance = OrToolsOptimizationInstance( + vehicles=orToolsOptimizationVehicles, + points=orToolsOptimizationPoints, + distance_matrix=distance_matrix, + time_matrix=time_matrix, + initial_routes=[], # <---------------------------- SET THIS LATER!!! + district_percentage=optimization.weight / 100, + log=log + ) + + log.info(f"Use unvisited crns: {optimization.useUnvisitedCrn}") + unvisitedOptimizationPoints: list[OptimizationPoint] = [] + if not optimization.useUnvisitedCrn: + visitedOptimizationPoints = list(filter(lambda op: op.isVisited, optimizationPoints)) + unvisitedOptimizationPoints = list(filter(lambda op: not op.isVisited, optimizationPoints)) + log.warning(f"Unvisited crns[{len(unvisitedOptimizationPoints)}]: {percentage(unvisitedOptimizationPoints, optimizationPoints)}%") + orToolsInstance = self.__filteredOrToolsInstance(orToolsInstance=orToolsInstance, optimizationPoints=visitedOptimizationPoints) + initialRoutes = self.__checkAndBalanceInitialRoutes(initialRoutes=initialRoutes, optimizationPoints=optimizationPoints, log=log) + initialRoutes = self.__filterInitialRoutes(initialRoutes=initialRoutes, optimizationPoints=visitedOptimizationPoints) + + log.info("Put initial route crn indexes to initial routes as is their place in optimization points list") + crn_optimizationPointIndex: dict[int, int] = {} + for i, op in enumerate(orToolsInstance.points): + hisa = int(op.hisa_id) + if hisa != 0: + crn_optimizationPointIndex[hisa] = i + + log.info("Set initial routes") + orToolsInstance.initial_routes = [[crn_optimizationPointIndex[crnPoint.hisa] for crnPoint in route] for route in initialRoutes] + + # Stop callback + def stop_callback_fn() -> bool: + return terminationCallback() + + # Solution callback + def solution_callback_fn(objective: int, solution: list[OrToolsOptimizationSolution], finished: bool, overlapping: dict[int, float] | None): + mappedSolution = [] + for os in solution: + optimizationVehicle = optimizationVehicleAll[os.vehicle_id] + + district = None + if optimization.weight > 0: + district = os.district if os.district is not None else orVehicleIndex_district.get(os.vehicle_id, None) + + kwargs = dict( + isExtra=os.dummy, + optimizationVehicleId=optimizationVehicle.id, + hise=[int(hi) for hi in os.hisa_ids], + distance=os.distance, + duration=os.duration, + district=district, + cost=os.cost + ) + mappedSolution.append(OptimizationSolution(**kwargs)) + + solutionCallback(objective, mappedSolution, finished, unvisitedOptimizationPoints, overlapping) + + if optimization.type == OptimizationType.INITIAL: + return self.__generateInitialSolution( + solvingTime=optimization.optimizationTime, + orToolsInstance=orToolsInstance, + solutionCallback=solution_callback_fn, + district_centrality=optimization.useDistrictCentrality, + log=log, + ) + elif optimization.type == OptimizationType.TEST: + return self.__generateTestSolution( + testingOptimizationPoints=self.__filterOptimizationPoints( + optimization=optimization, 
optimizationPoints=optimizationPoints, log=log), + solvingTime=optimization.optimizationTime, + orToolsInstance=orToolsInstance, + solutionCallback=solution_callback_fn, + log=log, + district_centrality=optimization.useDistrictCentrality, + ) + + # Starting optimization and getting final solution + objective, finalSolution, overlapping = OrToolsOptimizationService().vrpOptimization( + solving_time_sec=int(optimization.optimizationTime.total_seconds()), + instance=orToolsInstance, + config=config, + solution_callback_fn=lambda obj, sol, over: solution_callback_fn(objective=obj, solution=sol, finished=False, overlapping=over), + stop_callback_fn=stop_callback_fn, + log=log + ) + + solution_callback_fn(objective=objective, solution=finalSolution, finished=True, overlapping=overlapping) + + def __filterInitialRoutes(self, initialRoutes: list[list[CrnPoint]], optimizationPoints: list[OptimizationPoint]) -> list[list[CrnPoint]]: + """ Filter initial crns that are present inside optimization points """ + allowedHise = [op.crnPoint.hisa for op in optimizationPoints] + filteredInitialRoutes = [] + for route in initialRoutes: + filteredInitialRoute = [] + for crnPoint in route: + if crnPoint.hisa in allowedHise: + filteredInitialRoute.append(crnPoint) + filteredInitialRoutes.append(filteredInitialRoute) + return filteredInitialRoutes + + def __checkAndBalanceInitialRoutes( + self, initialRoutes: list[list[CrnPoint]], optimizationPoints: list[OptimizationPoint], log: Logger + ) -> list[list[CrnPoint]]: + if len(initialRoutes) == 0: + return [] + + """ Add missing initial crn points, remove not needed crn points """ + log.warning("Start balancing initial routes") + + log.info("Create crn mapping with optimization points as priority") + hisa_crn: dict[int, CrnPoint] = {} + hisa_initial_district: dict[int, int] = {} + for district, initialRoute in enumerate(initialRoutes): + for ip in initialRoute: + hisa_crn[ip.hisa] = ip + hisa_initial_district[ip.hisa] = district + for op in optimizationPoints: + hisa_crn[op.crnPoint.hisa] = op.crnPoint + + log.info("Get all initial crns") + initialHise = [] + for initialRoute in initialRoutes: + for ip in initialRoute: + initialHise.append(ip.hisa) + + log.info("Get all optimization crns") + optimizationHise = {op.crnPoint.hisa for op in optimizationPoints} + uniqueInitialHise = set(initialHise) + + # Check for duplicates + if len(uniqueInitialHise) != len(initialHise): + Exception(f"Initial routes contains duplicates: {[k for (k, v) in Counter(initialHise).items() if v > 1]} ") + if len(optimizationHise) != len(optimizationPoints): + opHise = [op.crnPoint.hisa for op in optimizationPoints] + raise Exception(f"Optimization points contains duplicates: {[k for (k, v) in Counter(opHise).items() if v > 1]} ") + + allCrns = list(hisa_crn.values()) + allCrnLocations = [crn.location.ballVector for crn in allCrns] + crnBallTree = BallTree(allCrnLocations, metric='haversine') + + missingInitialHise = optimizationHise - uniqueInitialHise + notUsedInitialHise = uniqueInitialHise - optimizationHise + + if len(missingInitialHise) > 0: + log.warning(f"Missing initial crns: {len(missingInitialHise)}: {missingInitialHise}") + if len(notUsedInitialHise) > 0: + log.warning(f"Not used initial crns: {len(notUsedInitialHise)}: {notUsedInitialHise}") + + # Insert missing crns to initial routes + log.info("Insert missing crns to initial routes") + for mih in missingInitialHise: + if mih == 0: # DO NOT INSERT POST OFFICE TO INITIAL ROUTES!!!!!!!!!!!!!!! 
+ continue + missingCrn = hisa_crn[mih] + closestCrnIndexes = crnBallTree.query([missingCrn.location.ballVector], k=int(len(optimizationPoints) / 2), return_distance=False)[0][:1] + + # Find to which district we can insert missing district + inserted = False + for closestCrnIndex in closestCrnIndexes: + closestCrn = allCrns[closestCrnIndex] + # We found closest crn that exists in initial districts we know where to insert it... + if closestCrn.hisa in hisa_initial_district: + closestCrnDistrict = hisa_initial_district[closestCrn.hisa] + initialRoutes[closestCrnDistrict].append(missingCrn) + inserted = True + break + + # If I could not inserted crn insert in random initial route + if not inserted: + initialRoutes[0].append(missingCrn) + + # Remove not used initial crns + for nuih in notUsedInitialHise: + notUsedCrn = hisa_crn[nuih] + notUsedCrnDistrict = hisa_initial_district[nuih] + initialRoutes[notUsedCrnDistrict].remove(notUsedCrn) + + return initialRoutes + + def __route_type(self, type: TransportMode) -> str: + match type: + case TransportMode.BIKE: + return 'bike' + case TransportMode.CAR: + return 'car' + case TransportMode.EV: + return 'ev' + case TransportMode.KM: + return 'km' + case TransportMode.KPM: + return 'kpm' + case TransportMode.MK: + return 'mk' + case TransportMode.WALK: + return 'foot' + case _: + raise TypeError(f"Mapping for transport mode does not exists: {type}") + + def __crn_type(self, type: OptimizationPointType) -> Literal['crn', 'depot', 'refill']: + if type == OptimizationPointType.CRN: + return 'crn' + elif type == OptimizationPointType.POSTA: + return 'depot' + elif type == OptimizationPointType.DOSTAVNIK: + return 'refill' + + raise TypeError(f"CRN type '{type}' currently not supported!") + + def __filteredOrToolsInstance( + self, orToolsInstance: OrToolsOptimizationInstance, optimizationPoints: list[OptimizationPoint] + ) -> OrToolsOptimizationInstance: + + depotOptPoint = orToolsInstance.points[0] + crnOptPoints = orToolsInstance.points[1:] + filteredHisaIds = [tOptP.crnPoint.hisa for tOptP in optimizationPoints] + + optPointIndexes = [0] + newOtimizationPoints = [depotOptPoint] # Depot must be on the first index!!!!!!!!!!! 
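        # The distance/time matrices are reduced below with np.ix_, which keeps only the
        # rows and columns of the selected point indexes. A small illustration on a
        # hypothetical 3x3 matrix, keeping the depot (index 0) plus point 2:
        #
        #   import numpy as np
        #   m = np.arange(9).reshape(3, 3)
        #   m[np.ix_([0, 2], [0, 2])]   # -> array([[0, 2],
        #                               #           [6, 8]])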
+ + # Fetch district optimization points and indexes for generating matrixes + for i, crnOptPoint in enumerate(crnOptPoints): + if int(crnOptPoint.hisa_id) in filteredHisaIds: + optPointIndexes.append(i + 1) + newOtimizationPoints.append(crnOptPoint) + + # Reset index to match new distance and time matrix + for i, optPoint in enumerate(newOtimizationPoints): + optPoint.id = i + + # Generate new distance matrices + distance_matrix: dict[str, np.ndarray] = {} + for vehicleType, matrix in orToolsInstance.distance_matrix.items(): + distance_matrix[vehicleType] = matrix[np.ix_(optPointIndexes, optPointIndexes)] + + # Generate new time matrices + time_matrix: dict[str, np.ndarray] = {} + for vehicleType, matrix in orToolsInstance.time_matrix.items(): + time_matrix[vehicleType] = matrix[np.ix_(optPointIndexes, optPointIndexes)] + + orToolsInstance.points = newOtimizationPoints + orToolsInstance.distance_matrix = distance_matrix + orToolsInstance.time_matrix = time_matrix + + return orToolsInstance + + def __generateTestSolution( + self, solvingTime: timedelta, + testingOptimizationPoints: list[OptimizationPoint], + orToolsInstance: OrToolsOptimizationInstance, + solutionCallback: Callable[[int, list[OrToolsOptimizationSolution], bool, Optional[dict[int, float]]], None], + log: Logger, + district_centrality: bool + ): + log.info("Generating test solution") + + orToolsInstance = self.__filteredOrToolsInstance(orToolsInstance=orToolsInstance, optimizationPoints=testingOptimizationPoints) + + # Starting optimization and getting final solution + objective, solution, overlapping = OrToolsOptimizationService().vrpOptimization( + solving_time_sec=int(solvingTime.total_seconds()), + instance=orToolsInstance, + config=self.config(setInitial=False, district_centering=district_centrality), + log=log + ) + + solutionCallback(objective, solution, True, overlapping) + + def __generateInitialSolution( + self, solvingTime: timedelta, orToolsInstance: OrToolsOptimizationInstance, + solutionCallback: Callable[[int, list[OrToolsOptimizationSolution], bool, Optional[dict[int, float]]], None], + log: Logger, + district_centrality: bool + + ): + log.info("Generating initial solution") + + # Remove vehicles constraints + for vehicle in orToolsInstance.vehicles: + vehicle.working_time_h = 1e3 + vehicle.range_km = 1e3 + vehicle.capacity = 1e3 + + depotOptPoint = orToolsInstance.points[0] + crnOptPoints = orToolsInstance.points[1:] + + districts = set([optPoint.district for optPoint in crnOptPoints]) # Depot is on the first index!!!!!!!!! 
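        # The overall solving budget is split evenly across districts; timedelta supports
        # division by an int, so e.g. (assuming a 30-minute budget and 3 districts):
        #
        #   from datetime import timedelta
        #   int((timedelta(minutes=30) / 3).total_seconds())   # -> 600 seconds per district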
+ solvingTimeSec = int((solvingTime / len(districts)).total_seconds()) + combinedSolutions = [] + combinedObjective = 0 + + for districtI, district in enumerate(sorted(list(districts))): + log.info(f"Optimizing district[{districtI}/{len(districts)}] = '{district}'") + + log.info(f"Searching for appropriate vehicles for district '{district}'") + districtVehicles = [] + for vehicle in orToolsInstance.vehicles: + if district in vehicle.districts: + log.info(f"Found vehicle: {vehicle}") + districtVehicles.append(vehicle) + + districtVehicles = districtVehicles[:1] + log.info(f"Force one vehicle for district '{district}': {districtVehicles}") + + if len(districtVehicles) == 0: + log.warning(f"No vehicles found for district '{district}' (using any free vehicle that has no district assigned) instead") + districtVehicles = [vehicle for vehicle in orToolsInstance.vehicles if len(vehicle.districts) == 0] + + districtOptPointIndexes = [0] + districtOptPoints = [depotOptPoint] # Depot must be on the first index!!!!!!!!!!! + + # Fetch district optimization points and indexes for generating matrixes + for crnI, crnOptPoint in enumerate(crnOptPoints): + if crnOptPoint.district == district: + districtOptPointIndexes.append(crnI + 1) + districtOptPoints.append(crnOptPoint) + elif crnOptPoint.district not in districts: + log.warning(f"CRN without district: {crnOptPoint}") + + # Reset index to match new distance and time matrix + for optPointI, optPoint in enumerate(districtOptPoints): + optPoint.id = optPointI + + # Generate new distance matrices + district_distance_matrix: dict[str, np.ndarray] = {} + for vehicleType, matrix in orToolsInstance.distance_matrix.items(): + district_distance_matrix[vehicleType] = matrix[np.ix_(districtOptPointIndexes, districtOptPointIndexes)] + + # Generate new time matrices + district_time_matrix: dict[str, np.ndarray] = {} + for vehicleType, matrix in orToolsInstance.distance_matrix.items(): + district_time_matrix[vehicleType] = matrix[np.ix_(districtOptPointIndexes, districtOptPointIndexes)] + + districtOrToolsInstance = OrToolsOptimizationInstance( + vehicles=districtVehicles, + points=districtOptPoints, + distance_matrix=district_distance_matrix, + time_matrix=district_time_matrix, + initial_routes=[[]], + log=log + ) + + # Starting optimization and getting final solution + objective, districtSolutions, overlapping = OrToolsOptimizationService().vrpOptimization( + solving_time_sec=solvingTimeSec, + instance=districtOrToolsInstance, + config=self.config(setInitial=False, district_centering=district_centrality), + log=log + ) + + numOfDistrictSolutions = len(districtSolutions) + if numOfDistrictSolutions != 1: + raise Exception(f"Solution for one district should have one solution but instead has: {numOfDistrictSolutions}") + + for solution in districtSolutions: + solution.vehicle_id = districtVehicles[solution.vehicle_id].id + solution.district = district + combinedSolutions.append(solution) + combinedObjective += objective + + solutionCallback(objective, combinedSolutions, False, None) + + solutionCallback(combinedObjective, combinedSolutions, True, None) + + def __filterOptimizationPoints( + self, optimization: Optimization, optimizationPoints: list[OptimizationPoint], log: Logger + ) -> list[OptimizationPoint]: + + optPoints = [] + + titleInfo = optimization.title.split() + log.info(f"Optimization parameters: {titleInfo}") + + match titleInfo[0]: + case "RADIUS": + radius = float(titleInfo[1]) + depot = optimizationPoints[0] + for optPoint in optimizationPoints[1:]: + 
if depot.crnPoint.location.distance(optPoint.crnPoint.location) < radius: + optPoints.append(optPoint) + case "SQUARE": + lats = [float(titleInfo[1]), float(titleInfo[3])] + lons = [float(titleInfo[2]), float(titleInfo[4])] + for optPoint in optimizationPoints[1:]: + if lats[0] < optPoint.crnPoint.location.lat < lats[1] and lons[0] < optPoint.crnPoint.location.lon < lons[1]: + optPoints.append(optPoint) + case "STREET": + streetName = titleInfo[1] + for optPoint in optimizationPoints[1:]: + if streetName in optPoint.crnPoint.naslov: + optPoints.append(optPoint) + case _: + raise Exception(f"Unknown testing category '{titleInfo[0]}'") + + log.info(f"Testing optimization points: {len(optPoints)}") + return optPoints diff --git a/admiral-worker/buildSrc/common.mk b/admiral-worker/buildSrc/common.mk new file mode 100644 index 0000000..b1f7821 --- /dev/null +++ b/admiral-worker/buildSrc/common.mk @@ -0,0 +1,39 @@ +VIRTUAL_ENV = venv +PATH := $(VIRTUAL_ENV)/bin:$(PATH) +PWD=$(shell pwd) +VERSION=$(shell grep "^project_version=" "../posta-poi-app-backend/project.properties" | cut -d'=' -f2) + +.DEFAULT_GOAL := help + +PACKAGE = mylinux +NOW_DATE=`date +%Y-%m-%d %H:%M:%S` + +define BROWSER_PYSCRIPT +import os, webbrowser, sys +try: + from urllib import pathname2url +except: + from urllib.request import pathname2url +webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) +endef +export BROWSER_PYSCRIPT +BROWSER := python -c "$$BROWSER_PYSCRIPT" + + +define PRINT_HELP_PYSCRIPT +import re, sys +for line in sys.stdin: + match = re.match(r'^### (.*) #', line) + if match: + target = match.groups()[0] + print("\n%s" % (target)) + match = re.match(r'^([a-zA-Z0-9_-]+):.*?## (.*)$$', line) + if match: + target, help = match.groups() + print(" %-15s %s" % (target, help)) +endef +export PRINT_HELP_PYSCRIPT + + +help: + @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) diff --git a/admiral-worker/cli/run_optimizationResults_migrations.py b/admiral-worker/cli/run_optimizationResults_migrations.py new file mode 100644 index 0000000..525ea5b --- /dev/null +++ b/admiral-worker/cli/run_optimizationResults_migrations.py @@ -0,0 +1,58 @@ +from collections import defaultdict +from uuid import UUID + +from app.App import App +from core.domain.map.CrnMicroUpdate import CrnMicroUpdateState, CrnMicroUpdate +from core.domain.map.CrnPoint import CrnPoint +from core.extend.logger import Progress +from core.types.Id import Id + +App.init() + +# ids = App.repos.optimizationResultRepo.getAllIds() +microLocations = App.repos.crnMicroUpdateRepo.getAllByState(state=CrnMicroUpdateState.POTRJENO) +ids = [Id(value=UUID('70faa8ef-83da-4b5f-9e76-84d1d5acb995'))] +posta_hisa_crn: dict[int, dict[int, (CrnPoint, CrnMicroUpdate)]] = defaultdict(dict) +hisa_micro: dict[int, CrnMicroUpdate] = {} + +for m in sorted(microLocations, key=lambda x: x.createdAt): + hisa_micro[m.hisa] = m + +progress = Progress("Optimization result migration", ids) +for i, id in enumerate(ids): + optimizationResult = App.repos.optimizationResultRepo.get(id=id) + opt = App.repos.optimizationRepo.get(id=optimizationResult.optimizationId) + posta = opt.posta.value + postOffice = App.repos.postOfficeRepo.get(posta=posta) + if posta not in posta_hisa_crn: + posta_hisa_crn[posta] = {crn.hisa: crn for crn in App.services.postaService.getCrnPoints(posta=posta)} + hisa_crn = posta_hisa_crn[posta] + for ri, route in enumerate(optimizationResult.routes): + for pi, point in enumerate(route.points): + point.crnPoint.attr.dostopnost = 'Kolo' + 
point.crnPoint.attr.namen = None + point.crnPoint.attr.nevarenPes = None + point.crnPoint.attr.dostopenNabiralnik = None + + mikro = hisa_micro.get(point.crnPoint.hisa, None) + if mikro is not None: + point.crnPoint.attr.mikroLat = mikro.lat + point.crnPoint.attr.mikroLon = mikro.lon + + optimizationResult.routes[ri].points[pi] = point + + for pi, point in enumerate(optimizationResult.unvisited): + point.crnPoint.attr.dostopnost = 'Kolo' + point.crnPoint.attr.namen = None + point.crnPoint.attr.nevarenPes = None + point.crnPoint.attr.dostopenNabiralnik = None + + mikro = hisa_micro.get(point.crnPoint.hisa, None) + if mikro is not None: + point.crnPoint.attr.mikroLat = mikro.lat + point.crnPoint.attr.mikroLon = mikro.lon + + optimizationResult.unvisited[pi] = point + + App.repos.optimizationResultRepo.post(optimizationResult) + progress.log(i, f"https://optdos.posta.si/optimizations/{opt.id.value}/result/{optimizationResult.id.value}") diff --git a/admiral-worker/cli/run_optimization_worker.py b/admiral-worker/cli/run_optimization_worker.py new file mode 100644 index 0000000..159151b --- /dev/null +++ b/admiral-worker/cli/run_optimization_worker.py @@ -0,0 +1,12 @@ +from dotenv import load_dotenv +load_dotenv() +from core.extend.logger import logFile + +print(f"Logfile: {logFile}") +from app.App import App + + + +if __name__ == '__main__': + App.init() + App.usecases.run_optimization_worker.now() diff --git a/admiral-worker/core/Env.py b/admiral-worker/core/Env.py new file mode 100644 index 0000000..0425da1 --- /dev/null +++ b/admiral-worker/core/Env.py @@ -0,0 +1,11 @@ +import os + +from dotenv import load_dotenv + +load_dotenv() + + +class Env: + LOGGING_DIR: str = os.getenv("LOGGING_DIR") + KEYSET_ENCRYPTED: str = os.getenv("KEYSET_ENCRYPTED") + FTP_PATH: str = os.getenv("FTP_PATH") diff --git a/admiral-worker/core/Utils.py b/admiral-worker/core/Utils.py new file mode 100644 index 0000000..b09c2da --- /dev/null +++ b/admiral-worker/core/Utils.py @@ -0,0 +1,125 @@ +import hashlib +import json +from collections import Counter +from dataclasses import fields +from datetime import timedelta, datetime, date +from enum import Enum +from pathlib import Path +from typing import Optional + +import pytz +from pypolyline.cutil import decode_polyline +from pytz import tzinfo + +from core.domain.map.GeoLocation import GeoLocation +from core.types.Id import Id +from core.types.IntId import IntId + + +def hash(o: str, size: Optional[int] = None) -> str: + # TODO return this!: hash = hashlib.sha1(o.encode()).hexdigest() + # if size is not None: + # hash = hash[-size:] + return o + + +def fileHash(path: Path, size: Optional[int] = None) -> str: + with path.open('r', encoding='utf-8') as f: + text = f.read() + hash = hashlib.sha1(text.encode()).hexdigest() + if size is not None: + hash = hash[-size:] + return hash + + +def chunks(data, size): + for i in range(0, len(data), size): + yield data[i:i + size] + + +def hash8(o: str) -> str: + return hash(o, size=8) + + +def encode(o: any): + if isinstance(o, Id): + return str(o.value) + if isinstance(o, IntId): + return int(o.value) + if isinstance(o, timedelta): + return int(o.total_seconds()) + if isinstance(o, datetime): + return int(o.timestamp()) + if isinstance(o, date): + return int(datetime.combine(o, datetime.min.time()).timestamp()) + if isinstance(o, Enum): + return o.value + if isinstance(o, set): + return list(o) + if isinstance(o, list): + return o + return o.__dict__ + + +def json_dump(o: any, f): + json.dump(obj=o, fp=f, default=encode) + + +def 
json_dumps(o: any) -> str: + return json.dumps(obj=o, default=encode) + + +def json_loads(o: any) -> str: + return json.loads(o) + + +def current_datetime_str() -> str: + return datetime.now().isoformat().replace(':', '_') + + +def polyline_decode(data: str) -> list[GeoLocation]: + return [GeoLocation(lon=arr[0], lat=arr[1]) for arr in decode_polyline(data.encode(), 6)] + + +def datetimeRange(start: datetime, end: datetime, step: timedelta) -> list[datetime]: + dates = [] + currentDay = start + while currentDay <= end: + dates.append(currentDay) + currentDay += step + + return dates + + +def saveDivision(a: float, b: float, default: float = None) -> Optional[float]: + return a / b if b else default + + +def percentage(a: list | int, b: list | int) -> Optional[float]: + aNum = len(a) if isinstance(a, list) else a + bNum = len(b) if isinstance(b, list) else b + if bNum == 0: + return None + return round(aNum / bNum * 100, 2) + + +def fromLocalToUtc(dt: datetime, localTimezone: tzinfo) -> datetime: + utc = localTimezone.localize(dt).astimezone(pytz.utc) + return datetime(year=utc.year, month=utc.month, day=utc.day, hour=utc.hour, minute=utc.minute, second=utc.second, microsecond=utc.microsecond) + + +def dateRange(start: date, end: date) -> set[date]: + return set([start + timedelta(days=x) for x in range((end - start).days)] + [end]) + + +def initDataclass(cls: any, **kwargs): + valid_field_names = {field.name for field in fields(cls)} + return cls(**{k: v for k, v in kwargs.items() if k in valid_field_names}) + +# Find legs of optimization result +def countDuplicates(arr): + # Count occurrences of each number + counts = Counter(arr) + # Filter numbers that occur more than once + duplicates = [(num, count) for num, count in counts.items() if count > 1] + return duplicates diff --git a/admiral-worker/core/Var.py b/admiral-worker/core/Var.py new file mode 100644 index 0000000..6f312ea --- /dev/null +++ b/admiral-worker/core/Var.py @@ -0,0 +1,46 @@ +import pytz + +from core import Utils +from core.domain.map.GpsSession import GpsSession +from core.domain.optimization.TransportMode import TransportMode + + +class Var: + localTimezone = pytz.timezone("Europe/Ljubljana") + + @staticmethod + def vrpOptimizationFileName() -> str: + return f"vrpOptimization_{Utils.current_datetime_str()}.bin" + + @staticmethod + def optimizationFilesZipName(posta: int) -> str: + return f"{posta}_optimizationFiles.zip" + + @staticmethod + def download_optimizationFilesZipName(posta: int) -> str: + timestamp = Utils.current_datetime_str() + return f"{timestamp}/download_{posta}_optimizationFiles_{timestamp}.zip" + + @staticmethod + def upload_optimizationFilesZipName(posta: int) -> str: + return f"upload_{posta}_optimizationFiles_{Utils.current_datetime_str()}.zip" + + @staticmethod + def optimizationPointsFileName(posta: int) -> str: + return f"{posta}_optimizationPoints.json" + + @staticmethod + def gpsFileName(posta: int) -> str: + return f"{posta}_gps.csv" + + @staticmethod + def gpsSessionFileName(gpsSession: GpsSession) -> str: + return f"{gpsSession.id.value}_gpsSession_gps.csv" + + @staticmethod + def distanceRouteMatrixFileName(posta: int, transportMode: TransportMode) -> str: + return f"{posta}_{transportMode.value}_distance_matrix.npy" + + @staticmethod + def durationRouteMatrixFileName(posta: int, transportMode: TransportMode) -> str: + return f"{posta}_{transportMode.value}_duration_matrix.npy" diff --git a/admiral-worker/core/domain/Delivery.py b/admiral-worker/core/domain/Delivery.py new file mode 100644 
index 0000000..8960d80 --- /dev/null +++ b/admiral-worker/core/domain/Delivery.py @@ -0,0 +1,14 @@ +from dataclasses import dataclass +from datetime import timedelta + +from core.domain.map.CrnAttributes import CrnAttributes +from core.domain.map.GeoLocation import GeoLocation + + +@dataclass(eq=True, frozen=False) +class Package: + id: str + pickup: GeoLocation + dropof: GeoLocation + weight: float + volume: float diff --git a/admiral-worker/core/domain/GeoLocation.py b/admiral-worker/core/domain/GeoLocation.py new file mode 100644 index 0000000..cb79969 --- /dev/null +++ b/admiral-worker/core/domain/GeoLocation.py @@ -0,0 +1,26 @@ +from dataclasses import dataclass +from math import cos, asin, sqrt, pi +from math import radians + +from typing_extensions import Self + + +@dataclass(eq=True, frozen=True) +class GeoLocation: + lat: float + lon: float + + @property + def ballVector(self) -> tuple[float, float]: + return radians(self.lat), radians(self.lon) + + @property + def vector(self) -> tuple[float, float]: + return self.lat, self.lon + + def distance(self, geoLocation: Self) -> float: + r = 6371000 # m + p = pi / 180 + + a = 0.5 - cos((geoLocation.lat-self.lat)*p)/2 + cos(self.lat*p) * cos(geoLocation.lat*p) * (1-cos((geoLocation.lon-self.lon)*p))/2 + return 2 * r * asin(sqrt(a)) diff --git a/admiral-worker/core/domain/RouteInfo.py b/admiral-worker/core/domain/RouteInfo.py new file mode 100644 index 0000000..e1225ba --- /dev/null +++ b/admiral-worker/core/domain/RouteInfo.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass, field + +from core.domain.map.GeoLocation import GeoLocation + + +@dataclass +class RouteInfo: + distance: float # Kilometers + duration: float # Seconds + steps: list[GeoLocation] = field(default_factory=list) diff --git a/admiral-worker/core/domain/optimization/Optimization.py b/admiral-worker/core/domain/optimization/Optimization.py new file mode 100644 index 0000000..9652df6 --- /dev/null +++ b/admiral-worker/core/domain/optimization/Optimization.py @@ -0,0 +1,32 @@ +import datetime +from dataclasses import dataclass, field +from typing import Optional + +from typing_extensions import Self + +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.map.PostOffice import PostOffice +from core.domain.optimization.OptimizationType import OptimizationType +from core.types.Id import Id +from core.types.IntId import IntId + + +@dataclass +class Optimization: + posta: IntId[PostOffice] + title: str + description: str + dates: list[datetime.date] + weight: int + optimizationTime: datetime.timedelta + createdAt: datetime.datetime + authorizedByUserId: str + state: OptimizationState + type: OptimizationType + parent: Optional[Id[Self]] + useFrequency: bool = False + useUnvisitedCrn: bool = True + useDistrictCentrality: bool = True + staticServiceTimes: Optional[int] = None + stateChangedAt: datetime.datetime = field(default_factory=datetime.datetime.now) + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/optimization/OptimizationFiles.py b/admiral-worker/core/domain/optimization/OptimizationFiles.py new file mode 100644 index 0000000..115cbad --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationFiles.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass, field + +from core.domain.map.RouteMatrix import RouteMatrix +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.TransportMode import TransportMode + + +@dataclass +class OptimizationFiles: + 
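+ # Bundle of inputs loaded for one post office: the optimization points plus one route (distance/duration) matrix per transport mode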
optimizationPoints: list[OptimizationPoint] = field(default_factory=list) + routeMatrices: dict[TransportMode, RouteMatrix] = field(default_factory=dict) diff --git a/admiral-worker/core/domain/optimization/OptimizationMetrics.py b/admiral-worker/core/domain/optimization/OptimizationMetrics.py new file mode 100644 index 0000000..7022ca1 --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationMetrics.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Optional + +from core.domain.optimization.Optimization import Optimization +from core.types.Id import Id + + +@dataclass +class OptimizationMetrics: + optimizationId: Id[Optimization] + solution: int + vehicles: int + cost: float + distance: float + duration: float + overlapping: Optional[dict[int, float]] + createdAt: datetime diff --git a/admiral-worker/core/domain/optimization/OptimizationPoint.py b/admiral-worker/core/domain/optimization/OptimizationPoint.py new file mode 100644 index 0000000..8f9347f --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationPoint.py @@ -0,0 +1,27 @@ +from dataclasses import dataclass +from datetime import timedelta + +from core.domain.map.CrnPoint import CrnPoint +from core.domain.optimization.OptimizationPointType import OptimizationPointType + + +@dataclass +class OptimizationPoint: + crnPoint: CrnPoint + serviceTime: timedelta + demand: int + visitFrequency: float + type: OptimizationPointType + group = None + + @staticmethod + def fromJson(**kwargs) -> 'OptimizationPoint': + kwargs['serviceTime'] = timedelta(seconds=kwargs['serviceTime']) + kwargs['crnPoint'] = CrnPoint.fromJson(**kwargs['crnPoint']) + kwargs['type'] = OptimizationPointType(kwargs['type']) + + return OptimizationPoint(**kwargs) + + @property + def isVisited(self): + return self.visitFrequency > 0 and self.crnPoint.isVisited diff --git a/admiral-worker/core/domain/optimization/OptimizationPointType.py b/admiral-worker/core/domain/optimization/OptimizationPointType.py new file mode 100644 index 0000000..42c31f4 --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationPointType.py @@ -0,0 +1,12 @@ +from enum import auto + +from core.types.AutoStrEnum import AutoStrEnum + + +class OptimizationPointType(AutoStrEnum): + CRN = auto() + POSTA = auto() + DOSTAVNIK = auto() + IZROCILNA_POSTA = auto() + POSTNI_NABIRALNIK = auto() + CRN_NEOBISKANA = auto() diff --git a/admiral-worker/core/domain/optimization/OptimizationResult.py b/admiral-worker/core/domain/optimization/OptimizationResult.py new file mode 100644 index 0000000..0b9946d --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationResult.py @@ -0,0 +1,23 @@ +import datetime +from dataclasses import dataclass, field +from typing import Optional + +from typing_extensions import Self + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationRoute import OptimizationRoute +from core.types.Id import Id + + +@dataclass +class OptimizationResult: + optimizationId: Id[Optimization] + routes: list[OptimizationRoute] + info: str + authorizedByUserId: str + parent: Optional[Id[Self]] + createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp())) + unvisited: list[OptimizationPoint] = field(default_factory=list) + + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/optimization/OptimizationResultData.py 
b/admiral-worker/core/domain/optimization/OptimizationResultData.py new file mode 100644 index 0000000..7c1d4ac --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationResultData.py @@ -0,0 +1,18 @@ +import datetime +from dataclasses import dataclass, field + +from typing_extensions import Self + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationResult import OptimizationResult +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.map.PostOffice import PostOffice +from core.domain.optimization.OptimizationType import OptimizationType +from core.types.Id import Id +from core.types.IntId import IntId + + +@dataclass +class OptimizationResultData: + optimization: Optimization + optimizationResult: OptimizationResult diff --git a/admiral-worker/core/domain/optimization/OptimizationRoute.py b/admiral-worker/core/domain/optimization/OptimizationRoute.py new file mode 100644 index 0000000..6fd5b77 --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationRoute.py @@ -0,0 +1,43 @@ +from dataclasses import dataclass, field +from typing import Optional + +from core import Utils +from core.Utils import initDataclass +from core.domain.map.GeoLocation import GeoLocation +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle + + +@dataclass +class OptimizationRoute: + name: str + isExtra: bool + index: int + distance: float + duration: int + cost: float + vehicle: OptimizationVehicle + points: list[OptimizationPoint] + steps: list[GeoLocation] + hash: str = field(default_factory=str) + + averageDistance: Optional[float] = None + averageDuration: Optional[int] = None + + @staticmethod + def fromJson(**kwargs) -> 'OptimizationRoute': + kwargs['vehicle'] = OptimizationVehicle(**kwargs['vehicle']) + kwargs['points'] = [OptimizationPoint.fromJson(**point) for point in kwargs['points']] + kwargs['steps'] = [GeoLocation(**stepDict) for stepDict in kwargs['steps']] + + return initDataclass(OptimizationRoute, **kwargs) + + def setHash(self): + self.hash = self.__calculateHash() + + @property + def isDirty(self): + return self.__calculateHash() != self.hash + + def __calculateHash(self) -> str: + return Utils.hash("".join([str(point.crnPoint.hisa) for point in self.points])) diff --git a/admiral-worker/core/domain/optimization/OptimizationSolution.py b/admiral-worker/core/domain/optimization/OptimizationSolution.py new file mode 100644 index 0000000..80506ce --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationSolution.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from datetime import timedelta +from typing import Optional + +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.types.Id import Id + + +@dataclass +class OptimizationSolution: + isExtra: bool + optimizationVehicleId: Id[OptimizationVehicle] + hise: list[int] + distance: int + duration: timedelta + cost: int + district: Optional[str] = None diff --git a/admiral-worker/core/domain/optimization/OptimizationState.py b/admiral-worker/core/domain/optimization/OptimizationState.py new file mode 100644 index 0000000..161e11f --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationState.py @@ -0,0 +1,13 @@ +from enum import auto + +from core.types.AutoStrEnum import AutoStrEnum + + +class OptimizationState(AutoStrEnum): + CREATED = auto() + ACTIVE = auto() + 
CANCELED = auto() + COMPLETED = auto() + FAILED = auto() + CONFIRMED = auto() + DELETED = auto() diff --git a/admiral-worker/core/domain/optimization/OptimizationType.py b/admiral-worker/core/domain/optimization/OptimizationType.py new file mode 100644 index 0000000..53fddc0 --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationType.py @@ -0,0 +1,10 @@ +from enum import auto + +from core.types.AutoStrEnum import AutoStrEnum + + +class OptimizationType(AutoStrEnum): + EXACT = auto() + BUILDING_TYPE = auto() + INITIAL = auto() + TEST = auto() diff --git a/admiral-worker/core/domain/optimization/OptimizationVehicle.py b/admiral-worker/core/domain/optimization/OptimizationVehicle.py new file mode 100644 index 0000000..ac0f21f --- /dev/null +++ b/admiral-worker/core/domain/optimization/OptimizationVehicle.py @@ -0,0 +1,23 @@ +from dataclasses import dataclass + +from typing_extensions import Self + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.TransportMode import TransportMode +from core.types.Id import Id + + +@dataclass +class OptimizationVehicle: + optimizationId: Id[Optimization] + name: str + type: TransportMode + capacity: int + range: float # Kilometers + minQuantity: int + maxQuantity: int + deliveryTime: float # Hours + averageSpeed: float + maxSpeed: float + districts: str + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/optimization/TransportMode.py b/admiral-worker/core/domain/optimization/TransportMode.py new file mode 100644 index 0000000..62eaf62 --- /dev/null +++ b/admiral-worker/core/domain/optimization/TransportMode.py @@ -0,0 +1,13 @@ +from enum import auto + +from core.types.AutoStrEnum import AutoStrEnum + + +class TransportMode(AutoStrEnum): + BIKE = auto() + CAR = auto() + EV = auto() + KM = auto() + KPM = auto() + MK = auto() + WALK = auto() diff --git a/admiral-worker/core/domain/worker/Worker.py b/admiral-worker/core/domain/worker/Worker.py new file mode 100644 index 0000000..f35f941 --- /dev/null +++ b/admiral-worker/core/domain/worker/Worker.py @@ -0,0 +1,19 @@ +from dataclasses import dataclass +from typing_extensions import Self + +from core.domain.worker.WorkerState import WorkerState +from core.domain.worker.WorkerType import WorkerType +from core.types.Id import Id + + + +@dataclass +class Worker: + ip: str + type: WorkerType + state: WorkerState + id: Id[Self] = Id.field() + + @property + def name(self): + return f"{self.type.value}-{self.ip}" diff --git a/admiral-worker/core/domain/worker/WorkerJob.py b/admiral-worker/core/domain/worker/WorkerJob.py new file mode 100644 index 0000000..a37b655 --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerJob.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from typing_extensions import Self + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.worker.Worker import Worker +from core.types.Id import Id + + +@dataclass +class WorkerJob: + workerId: Id[Worker] + optimizationId: Id[Optimization] + name: str + state: OptimizationState + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/worker/WorkerJobStatus.py b/admiral-worker/core/domain/worker/WorkerJobStatus.py new file mode 100644 index 0000000..f83e75c --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerJobStatus.py @@ -0,0 +1,17 @@ +import datetime +from dataclasses import dataclass, field +from typing import Any +from typing_extensions import 
Self + +from core.types.Id import Id + + +@dataclass +class WorkerJobStatus: + ownerId: Id[Any] + ramTaken: float + cpuUtilization: float + objective: int + createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp())) + + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/worker/WorkerLog.py b/admiral-worker/core/domain/worker/WorkerLog.py new file mode 100644 index 0000000..803b419 --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerLog.py @@ -0,0 +1,18 @@ +import datetime +from dataclasses import dataclass, field +from typing import Any +from typing_extensions import Self + +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.types.Id import Id + + +@dataclass +class WorkerLog: + context: str + data: str + ownerId: Id[Any] + level: WorkerLogLevel + createdAt: float = field(default_factory=lambda: datetime.datetime.now().timestamp()) + + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/worker/WorkerLogLevel.py b/admiral-worker/core/domain/worker/WorkerLogLevel.py new file mode 100644 index 0000000..e647946 --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerLogLevel.py @@ -0,0 +1,15 @@ +import datetime +from dataclasses import dataclass, field +from enum import auto +from typing import Any +from typing_extensions import Self + +from core.types.AutoStrEnum import AutoStrEnum +from core.types.Id import Id + + +class WorkerLogLevel(AutoStrEnum): + DEBUG = auto() + INFO = auto() + WARN = auto() + ERROR = auto() diff --git a/admiral-worker/core/domain/worker/WorkerState.py b/admiral-worker/core/domain/worker/WorkerState.py new file mode 100644 index 0000000..f543d9f --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerState.py @@ -0,0 +1,14 @@ +import datetime +from dataclasses import dataclass, field +from enum import auto +from typing import Any + +from typing_extensions import Self + +from core.types.AutoStrEnum import AutoStrEnum +from core.types.Id import Id + + +class WorkerState(AutoStrEnum): + NORMAL = auto() + DEPRECATED = auto() diff --git a/admiral-worker/core/domain/worker/WorkerStatus.py b/admiral-worker/core/domain/worker/WorkerStatus.py new file mode 100644 index 0000000..90db619 --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerStatus.py @@ -0,0 +1,17 @@ +import datetime +from dataclasses import dataclass, field +from typing import Any + +from typing_extensions import Self + +from core.types.Id import Id + + +@dataclass +class WorkerStatus: + ownerId: Id[Any] + ramAvailable: float + cpuUtilization: float + createdAt: int = field(default_factory=lambda: int(datetime.datetime.now().timestamp())) + + id: Id[Self] = Id.field() diff --git a/admiral-worker/core/domain/worker/WorkerType.py b/admiral-worker/core/domain/worker/WorkerType.py new file mode 100644 index 0000000..5c361e8 --- /dev/null +++ b/admiral-worker/core/domain/worker/WorkerType.py @@ -0,0 +1,9 @@ +from enum import auto + +from core.types.AutoStrEnum import AutoStrEnum + + +class WorkerType(AutoStrEnum): + OPTIMIZER = auto() + UPDATER = auto() + GPS = auto() diff --git a/admiral-worker/core/extend/fs.py b/admiral-worker/core/extend/fs.py new file mode 100644 index 0000000..409befc --- /dev/null +++ b/admiral-worker/core/extend/fs.py @@ -0,0 +1,31 @@ +import tempfile +from pathlib import Path + +from core.Env import Env + + +def getPath(_file_, *path) -> Path: + return Path(_file_).parent.joinpath(*path).resolve() + + +def getTempPath(*path) -> Path: + return Path(tempfile.gettempdir()).joinpath(*path) + + 
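+# Illustrative only (the FTP root below is an assumed example value): with Env.FTP_PATH = '/srv/ftp',
+# getFtpPath('gps') resolves to /srv/ftp/gps while getWorkerPath('7000') resolves to /srv/worker/7000,
+# i.e. the worker/ and updater/ trees live next to the FTP root rather than inside it.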
+def getWorkerPath(*path) -> Path: + return Path(Env.FTP_PATH, "..", "worker", *path).resolve() + + +def getUpdaterPath(*path) -> Path: + return Path(Env.FTP_PATH, "..", "updater", *path).resolve() + + +def getFtpPath(*path) -> Path: + return Path(Env.FTP_PATH, *path).resolve() + +def getFtpPaths(*path, glob='**/*') -> list[Path]: + p = Path(Env.FTP_PATH, *path).glob(glob) + return [x for x in p if x.is_file()] + +def getFtpGpsPath(*path) -> Path: + return Path(Env.FTP_PATH, "gps", *path).resolve() diff --git a/admiral-worker/core/extend/logger.py b/admiral-worker/core/extend/logger.py new file mode 100644 index 0000000..ae26abe --- /dev/null +++ b/admiral-worker/core/extend/logger.py @@ -0,0 +1,86 @@ +import logging +import sys +import time +from logging.config import dictConfig +from typing import Sized + +import urllib3 + +from core.Env import Env +from core.extend import fs + +logFile = fs.getPath(__file__, f"../../logs/{Env.LOGGING_DIR}.log").resolve() + +logFile.parent.mkdir(parents=True, exist_ok=True) + +# WARGNINGS +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + +# ERRORS ON WINDOWS CONSOLE +sys.stdin.reconfigure(encoding='utf-8') +if hasattr(sys.stdout, "reconfigure"): + sys.stdout.reconfigure(encoding='utf-8') + + +logging.config.dictConfig({ + 'version': 1, + 'disable_existing_loggers': True, + 'formatters': { + 'default': { + 'format': "%(asctime)s | %(processName)s | %(module)40s:%(lineno)-3d | %(levelname)-7s | %(message)s", + 'datefmt': "%Y.%m.%d %H:%M:%S", + }, + }, + 'handlers': { + 'console': { + 'level': 'INFO', + 'class': 'logging.StreamHandler', + 'formatter': 'default', + 'stream': sys.stderr, + }, + 'file': { + 'level': 'DEBUG', + 'class': "logging.handlers.RotatingFileHandler", + 'formatter': 'default', + 'encoding': 'UTF-8', + "filename": logFile, + "maxBytes": 1e9, + "backupCount": 10, + }, + }, + 'loggers': { + 'app.services.PostaApiService': { + 'level': 'WARN' + }, + 'sqlalchemy.engine.Engine': { + 'level': 'WARN' + }, + '': { + 'handlers': ['console', 'file'], + 'level': 'INFO', + 'propagate': True + } + } +}) + +log = logging.getLogger(__name__) + + +class Progress: + def __init__(self, title: str, iter: Sized | int): + self.title = title + self.size = len(iter) if isinstance(iter, Sized) else iter + self.startTime = time.time() + self.nowTime = self.startTime + + def log(self, i: int, info: str = None) -> callable: + nowTime = time.time() + + if nowTime - self.nowTime > 1: + percent = round(i / self.size * 100, 2) + secondsPerIter = (nowTime - self.startTime) / (i + 1) + iterLeft = self.size - i + secondsLeft = secondsPerIter * iterLeft + minutesLeft = round(secondsLeft / 60, 2) + log.info(f"{self.title if info is None else info}: {percent}% => {iterLeft} => {minutesLeft} minutes") + self.nowTime = nowTime diff --git a/admiral-worker/core/repos/OptimizationMetricsRepo.py b/admiral-worker/core/repos/OptimizationMetricsRepo.py new file mode 100644 index 0000000..0398a5b --- /dev/null +++ b/admiral-worker/core/repos/OptimizationMetricsRepo.py @@ -0,0 +1,34 @@ +import json +import uuid +from abc import abstractmethod +from dataclasses import dataclass + +from sqlalchemy import Engine, BLOB, PrimaryKeyConstraint +from sqlmodel import SQLModel, Field, Session, select +from typing_extensions import override, Self, Optional + +from core import Utils +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationMetrics import OptimizationMetrics +from core.repos.OptimizationResultRepo import 
OptimizationResultRepo +from core.types.Id import Id + + +@dataclass +class OptimizationMetricsRepo: + + @abstractmethod + def getAll(self) -> list[OptimizationMetrics]: + pass + + @abstractmethod + def get(self, id: Id[OptimizationMetrics]) -> Optional[OptimizationMetrics]: + pass + + @abstractmethod + def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationMetrics]: + pass + + @abstractmethod + def post(self, optimizationMetrics: OptimizationMetrics) -> OptimizationMetrics: + pass diff --git a/admiral-worker/core/repos/OptimizationRepo.py b/admiral-worker/core/repos/OptimizationRepo.py new file mode 100644 index 0000000..59f4bac --- /dev/null +++ b/admiral-worker/core/repos/OptimizationRepo.py @@ -0,0 +1,33 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationState import OptimizationState +from core.types.Id import Id +from core.types.IntId import IntId + + +class OptimizationRepo(ABC): + @abstractmethod + def getAll(self) -> list[Optimization]: + pass + + @abstractmethod + def get(self, id: Id[Optimization]) -> Optional[Optimization]: + pass + + @abstractmethod + def getWithState(self, state: OptimizationState) -> list[Optimization]: + pass + + @abstractmethod + def updateFirst(self, fromState: OptimizationState, toState: OptimizationState) -> Optional[Optimization]: + pass + + @abstractmethod + def setState(self, id: Id[Optimization], toState: OptimizationState) -> Optional[Optimization]: + pass + + @abstractmethod + def getLatestConfirmedByPosta(self, posta: int) -> Optional[Optimization]: + pass diff --git a/admiral-worker/core/repos/OptimizationResultRepo.py b/admiral-worker/core/repos/OptimizationResultRepo.py new file mode 100644 index 0000000..12747d4 --- /dev/null +++ b/admiral-worker/core/repos/OptimizationResultRepo.py @@ -0,0 +1,32 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationResult import OptimizationResult +from core.types.Id import Id + + +class OptimizationResultRepo(ABC): + @abstractmethod + def getAll(self) -> list[OptimizationResult]: + pass + + @abstractmethod + def getAllIds(self) -> list[Id[OptimizationResult]]: + pass + + @abstractmethod + def get(self, id: Id[OptimizationResult]) -> Optional[OptimizationResult]: + pass + + @abstractmethod + def post(self, optimizationResult: OptimizationResult) -> OptimizationResult: + pass + + @abstractmethod + def getAllByOptimizationId(self, optimizationId) -> list[OptimizationResult]: + pass + + @abstractmethod + def getLatestByOptimizationId(self, optimizationId: Id[Optimization]) -> Optional[OptimizationResult]: + pass diff --git a/admiral-worker/core/repos/OptimizationVehicleRepo.py b/admiral-worker/core/repos/OptimizationVehicleRepo.py new file mode 100644 index 0000000..bdc21b6 --- /dev/null +++ b/admiral-worker/core/repos/OptimizationVehicleRepo.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.types.Id import Id + + +class OptimizationVehicleRepo(ABC): + @abstractmethod + def getAll(self) -> list[OptimizationVehicle]: + pass + + @abstractmethod + def getAllByOptimizationId(self, optimizationId: Id[Optimization]) -> list[OptimizationVehicle]: + pass 
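+ # Typical usage in this patch: Run_optimization_job loads the vehicle pool with getAllByOptimizationId(optimizationId=...) and resolves each route's vehicle with get(route.optimizationVehicleId)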
+ + @abstractmethod + def get(self, id: Id[OptimizationVehicle]) -> Optional[OptimizationVehicle]: + pass diff --git a/admiral-worker/core/repos/WorkerJobLogRepo.py b/admiral-worker/core/repos/WorkerJobLogRepo.py new file mode 100644 index 0000000..058328b --- /dev/null +++ b/admiral-worker/core/repos/WorkerJobLogRepo.py @@ -0,0 +1,21 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.WorkerJob import WorkerJob +from core.domain.worker.WorkerLog import WorkerLog +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.types.Id import Id + + +class WorkerJobLogRepo(ABC): + @abstractmethod + def getAll(self) -> list[WorkerLog]: + pass + + @abstractmethod + def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]: + pass + + @abstractmethod + def post(self, context: str, workerJobId: Id[WorkerJob], data: str, level: WorkerLogLevel) -> WorkerLog: + pass diff --git a/admiral-worker/core/repos/WorkerJobRepo.py b/admiral-worker/core/repos/WorkerJobRepo.py new file mode 100644 index 0000000..bdf739a --- /dev/null +++ b/admiral-worker/core/repos/WorkerJobRepo.py @@ -0,0 +1,19 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.WorkerJob import WorkerJob +from core.types.Id import Id + + +class WorkerJobRepo(ABC): + @abstractmethod + def getAll(self) -> list[WorkerJob]: + pass + + @abstractmethod + def get(self, id: Id[WorkerJob]) -> Optional[WorkerJob]: + pass + + @abstractmethod + def post(self, obj: WorkerJob) -> WorkerJob: + pass diff --git a/admiral-worker/core/repos/WorkerJobStatusRepo.py b/admiral-worker/core/repos/WorkerJobStatusRepo.py new file mode 100644 index 0000000..08f776e --- /dev/null +++ b/admiral-worker/core/repos/WorkerJobStatusRepo.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.WorkerJob import WorkerJob +from core.domain.worker.WorkerJobStatus import WorkerJobStatus +from core.types.Id import Id + + +class WorkerJobStatusRepo(ABC): + @abstractmethod + def getAll(self) -> list[WorkerJobStatus]: + pass + + @abstractmethod + def get(self, id: Id[WorkerJobStatus]) -> Optional[WorkerJobStatus]: + pass + + @abstractmethod + def post(self, workerJobId: Id[WorkerJob], ramTaken: float, cpuUtilization: float, objective: int) -> WorkerJobStatus: + pass diff --git a/admiral-worker/core/repos/WorkerLogRepo.py b/admiral-worker/core/repos/WorkerLogRepo.py new file mode 100644 index 0000000..86c4244 --- /dev/null +++ b/admiral-worker/core/repos/WorkerLogRepo.py @@ -0,0 +1,21 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerLog import WorkerLog +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.types.Id import Id + + +class WorkerLogRepo(ABC): + @abstractmethod + def getAll(self) -> list[WorkerLog]: + pass + + @abstractmethod + def get(self, id: Id[WorkerLog]) -> Optional[WorkerLog]: + pass + + @abstractmethod + def post(self, context: str, workerId: Id[Worker], data: str, level: WorkerLogLevel) -> WorkerLog: + pass diff --git a/admiral-worker/core/repos/WorkerRepo.py b/admiral-worker/core/repos/WorkerRepo.py new file mode 100644 index 0000000..eeb852d --- /dev/null +++ b/admiral-worker/core/repos/WorkerRepo.py @@ -0,0 +1,28 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerType import WorkerType +from 
core.types.Id import Id + + +class WorkerRepo(ABC): + @abstractmethod + def getAll(self) -> list[Worker]: + pass + + @abstractmethod + def get(self, id: Id[Worker]) -> Optional[Worker]: + pass + + @abstractmethod + def post(self, ip: str, type: WorkerType) -> Worker: + pass + + @abstractmethod + def getByIp(self, ip: str, type: WorkerType): + pass + + @abstractmethod + def deleteByIp(self, ip: str, type: WorkerType) -> int: + pass diff --git a/admiral-worker/core/repos/WorkerStatusRepo.py b/admiral-worker/core/repos/WorkerStatusRepo.py new file mode 100644 index 0000000..9a27443 --- /dev/null +++ b/admiral-worker/core/repos/WorkerStatusRepo.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Optional + +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerStatus import WorkerStatus +from core.types.Id import Id + + +class WorkerStatusRepo(ABC): + @abstractmethod + def getAll(self) -> list[WorkerStatus]: + pass + + @abstractmethod + def get(self, id: Id[WorkerStatus]) -> Optional[WorkerStatus]: + pass + + @abstractmethod + def post(self, workerId: Id[Worker], ramAvailable: float, cpuUtilization: float) -> WorkerStatus: + pass diff --git a/admiral-worker/core/services/FtpService.py b/admiral-worker/core/services/FtpService.py new file mode 100644 index 0000000..9782ccb --- /dev/null +++ b/admiral-worker/core/services/FtpService.py @@ -0,0 +1,28 @@ +from abc import ABC, abstractmethod +from pathlib import Path + + +class FtpService(ABC): + @abstractmethod + def download(self, path: Path): + pass + + @abstractmethod + def upload(self, path: Path): + pass + + @abstractmethod + def rename(self, oldPath: Path, newPath: Path): + pass + + @abstractmethod + def delete(self, path: Path): + pass + + @abstractmethod + def copy(self, path: Path, newPath: Path): + pass + + @abstractmethod + def scan(self) -> list[Path]: + pass diff --git a/admiral-worker/core/services/OptimizationService.py b/admiral-worker/core/services/OptimizationService.py new file mode 100644 index 0000000..e85fb44 --- /dev/null +++ b/admiral-worker/core/services/OptimizationService.py @@ -0,0 +1,26 @@ +from abc import ABC, abstractmethod +from typing import Callable, Optional + +from core.domain.map.RouteMatrix import RouteMatrix +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationResultData import OptimizationResultData +from core.domain.optimization.OptimizationSolution import OptimizationSolution +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.domain.optimization.TransportMode import TransportMode +from core.types.Logger import Logger + + +class OptimizationService(ABC): + @abstractmethod + def vrpOptimization( + self, + optimization: Optimization, + optimizationVehicles: list[OptimizationVehicle], + optimizationPoints: list[OptimizationPoint], + routeMatrices: dict[TransportMode, RouteMatrix], + solutionCallback: Callable[[int, list[OptimizationSolution], bool, list[OptimizationPoint], Optional[dict[int, float]]], None], + terminationCallback: Callable[[], bool], + log: Logger, + initialOptimizationResultData: Optional[OptimizationResultData]): + pass diff --git a/admiral-worker/core/services/RoutingService.py b/admiral-worker/core/services/RoutingService.py new file mode 100644 index 0000000..f361156 --- /dev/null +++ b/admiral-worker/core/services/RoutingService.py @@ -0,0 +1,21 @@ +from abc import ABC, 
abstractmethod + +from core.domain.map.GeoLocation import GeoLocation +from core.domain.map.RouteInfo import RouteInfo +from core.domain.map.RouteMatrix import RouteMatrix +from core.domain.optimization.TransportMode import TransportMode + + +class RoutingService(ABC): + + @abstractmethod + def getRouteMatrix(self, geoLocations: list[GeoLocation], transportMode: TransportMode) -> RouteMatrix: + pass + + @abstractmethod + def getRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation]) -> RouteInfo: + pass + + @abstractmethod + def getAverageRouteInfo(self, transportMode: TransportMode, legs: list[GeoLocation], probability: list[float], iterations: int) -> RouteInfo: + pass diff --git a/admiral-worker/core/services/SystemService.py b/admiral-worker/core/services/SystemService.py new file mode 100644 index 0000000..568c61d --- /dev/null +++ b/admiral-worker/core/services/SystemService.py @@ -0,0 +1,41 @@ +from abc import ABC, abstractmethod +from typing import Optional + + +class SystemService(ABC): + + @abstractmethod + def getIp(self) -> str: + pass + + @abstractmethod + def getCpuUtilization(self) -> float: + pass + + @abstractmethod + def getRamMbAvailable(self) -> float: + pass + + @abstractmethod + def getMaxRamMbAvailable(self) -> float: + pass + + @abstractmethod + def getCpuAvailable(self) -> int: + pass + + @abstractmethod + def getProcessCpu(self, pid: int = None) -> Optional[float]: + pass + + @abstractmethod + def getProcessRam(self, pid: int = None) -> Optional[float]: + pass + + @abstractmethod + def killProcess(self, pid: int = None): + pass + + @abstractmethod + def terminateProcess(self, pid: int = None): + pass diff --git a/admiral-worker/core/types/AutoStrEnum.py b/admiral-worker/core/types/AutoStrEnum.py new file mode 100644 index 0000000..8897a05 --- /dev/null +++ b/admiral-worker/core/types/AutoStrEnum.py @@ -0,0 +1,14 @@ +from enum import Enum + + +class AutoStrEnum(str, Enum): + """ + StrEnum where enum.auto() returns the field name. 
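+ For example, class Color(AutoStrEnum): RED = auto() yields a member for which Color.RED == "RED" is True.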
+ See https://docs.python.org/3.9/library/enum.html#using-automatic-values + """ + + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list) -> str: + return name + # Or if you prefer, return lower-case member (it's StrEnum default behavior since Python 3.11): + # return name.lower() diff --git a/admiral-worker/core/types/Id.py b/admiral-worker/core/types/Id.py new file mode 100644 index 0000000..277dfdd --- /dev/null +++ b/admiral-worker/core/types/Id.py @@ -0,0 +1,35 @@ +import uuid +from dataclasses import field +from typing import Generic, TypeVar + +from typing_extensions import Self + +from pydantic import model_serializer, BaseModel + +T = TypeVar('T') + + +class Id(BaseModel, Generic[T]): + value: uuid.UUID + + def __init__(self, value: uuid.UUID = None): + value = value if value is not None else uuid.uuid4() + super().__init__(value=value) + self.value = value + + @staticmethod + def field(): + return field(default_factory=Id) + + def __str__(self): + return str(self.value) + + def __eq__(self, other: Self): + return self.value == other.value + + def __hash__(self): + return hash(self.value) + + @model_serializer + def model_serializer(self) -> str: + return self.__str__() diff --git a/admiral-worker/core/types/Logger.py b/admiral-worker/core/types/Logger.py new file mode 100644 index 0000000..d29079a --- /dev/null +++ b/admiral-worker/core/types/Logger.py @@ -0,0 +1,15 @@ +from abc import ABC, abstractmethod + + +class Logger(ABC): + @abstractmethod + def info(self, data): + pass + + @abstractmethod + def warning(self, data): + pass + + @abstractmethod + def error(self, data): + pass diff --git a/admiral-worker/core/usecases/Run_optimization_worker.py b/admiral-worker/core/usecases/Run_optimization_worker.py new file mode 100644 index 0000000..b5f5120 --- /dev/null +++ b/admiral-worker/core/usecases/Run_optimization_worker.py @@ -0,0 +1,123 @@ +from datetime import datetime, timedelta + +import logging +import time +import traceback +from dataclasses import dataclass +from multiprocessing import Process +from typing import Callable + +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerJob import WorkerJob +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.domain.worker.WorkerState import WorkerState +from core.domain.worker.WorkerType import WorkerType +from core.repos.OptimizationRepo import OptimizationRepo +from core.repos.WorkerJobRepo import WorkerJobRepo +from core.repos.WorkerLogRepo import WorkerLogRepo +from core.repos.WorkerRepo import WorkerRepo +from core.services.SystemService import SystemService +from core.types.Id import Id +from core.usecases.initialization.Register_worker import Register_worker + +log = logging.getLogger(__name__) + + +@dataclass +class Run_optimization_worker: + register_worker: Register_worker + workerLogRepo: WorkerLogRepo + optimizationRepo: OptimizationRepo + workerJobRepo: WorkerJobRepo + systemService: SystemService + init_run_optimization_job: Callable[[WorkerJob], None] + init_log_worker_status: Callable[[Id[Worker]], None] + workerRepo: WorkerRepo + + def getActiveOptimizationsWarmingUp(self, stateChangedDuration: timedelta): + activeOptimizations = self.optimizationRepo.getWithState(state=OptimizationState.ACTIVE) + return list(filter(lambda opt: (datetime.now() - opt.stateChangedAt) < stateChangedDuration, activeOptimizations)) + + def now(self): + log.info("Register worker") 
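+ # Register this host as an OPTIMIZER worker; Register_worker returns the existing registration when a worker with the same ip and type already exists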
+ result = self.register_worker.now(workerType=WorkerType.OPTIMIZER) + workerId = result.worker.id + + log.info("Create new daemon process for the worker logging") + job = Process(target=self.init_log_worker_status, kwargs={'id': workerId}, name=result.worker.name) + job.daemon = False + job.start() + + log.info("Waiting for worker events...") + while True: + try: + log.debug("===[END OPTIMIZATION WORKER]=============================") + log.debug("") + log.debug("") + log.debug("") + time.sleep(5) + log.debug("===[RUN OPTIMIZATION WORKER]=============================") + + worker = self.workerRepo.get(id=workerId) + if worker.state == WorkerState.DEPRECATED: + log.info(f"Worker {worker.id} is deprecated") + continue + + maxRamOptimizationReserved = 7000 + availableCpu = self.systemService.getCpuAvailable() + maxMbRam = self.systemService.getMaxRamMbAvailable() + availableMbRam = self.systemService.getRamMbAvailable() + takenMbRam = maxMbRam - availableMbRam + activeOptimizationsWarmingUp = self.getActiveOptimizationsWarmingUp(stateChangedDuration=timedelta(hours=1)) + virtualTakenMbRam = takenMbRam + (maxRamOptimizationReserved * len(activeOptimizationsWarmingUp)) + virtualFreeMbRam = maxMbRam - virtualTakenMbRam + + log.info(", ".join([ + f"Worker {worker.id}: {worker.state.name}", + f"CPU={availableCpu}", + f"WARMUP={len(activeOptimizationsWarmingUp)}", + f"VIR_FREE_RAM={round(virtualFreeMbRam / 1e3, 1)}Gb", + f"FREE_RAM={round(availableMbRam / 1e3, 1)}Gb", + f"MAX_RAM={round(maxMbRam / 1e3, 1)}Gb", + ])) + + if availableCpu == 0: + log.warn("System is missing available cpus") + continue + + if virtualFreeMbRam < maxRamOptimizationReserved: + log.warn("System is missing available ram") + continue + + log.debug("Get first optimization and update it into active state") + activeOptimization = self.optimizationRepo.updateFirst(fromState=OptimizationState.CREATED, toState=OptimizationState.ACTIVE) + + if activeOptimization is None: + log.debug(f"Active optimization not found") + continue + + log.info(f"Active optimization found") + log.info("Create new worker job") + workerJob = WorkerJob( + workerId=workerId, + optimizationId=activeOptimization.id, + name=activeOptimization.title, + state=OptimizationState.CREATED + ) + self.workerJobRepo.post(obj=workerJob) + + log.info("Create new daemon process for the optimization") + job = Process(target=self.init_run_optimization_job, kwargs={'workerJob': workerJob}, + name=f"{result.worker.name} {activeOptimization.type.value} '{workerJob.name}'") + job.daemon = False + job.start() + + log.info(f"Job process: PID={job.pid}, NAME={job.name}, DEAMON={job.daemon}") + + except BaseException as err: + log.error(err) + log.error(traceback.format_exc()) + self.workerLogRepo.post(context=__name__, workerId=workerId, data=f"Exception: {err}", level=WorkerLogLevel.ERROR) + + job.terminate() diff --git a/admiral-worker/core/usecases/initialization/Register_worker.py b/admiral-worker/core/usecases/initialization/Register_worker.py new file mode 100644 index 0000000..4342ebf --- /dev/null +++ b/admiral-worker/core/usecases/initialization/Register_worker.py @@ -0,0 +1,46 @@ +import logging +from dataclasses import dataclass + +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.domain.worker.WorkerType import WorkerType +from core.repos.WorkerLogRepo import WorkerLogRepo +from core.repos.WorkerRepo import WorkerRepo +from core.repos.WorkerStatusRepo import WorkerStatusRepo +from 
core.services.SystemService import SystemService + +log = logging.getLogger(__name__) + + +@dataclass +class Register_worker: + workerRepo: WorkerRepo + workerStatusRepo: WorkerStatusRepo + workerLogRepo: WorkerLogRepo + systemService: SystemService + + class Result: + @dataclass + class WorkerAlreadyExists: + worker: Worker + + @dataclass + class Ok: + worker: Worker + + def now(self, workerType: WorkerType) -> Result.Ok | Result.WorkerAlreadyExists: + log.info("Get current public ip") + ip = self.systemService.getIp() + + log.info("Check if worker already exists") + worker = self.workerRepo.getByIp(ip=ip, type=workerType) + if worker is not None: + return self.Result.WorkerAlreadyExists(worker=worker) + + log.info("Register worker by ip") + worker = self.workerRepo.post(ip=ip, type=workerType) + + log.info("Log worker registration and initial status") + self.workerLogRepo.post(context=__name__, workerId=worker.id, data=f"Registering '{workerType}' worker with ip: '{ip}'", level=WorkerLogLevel.INFO) + + return self.Result.Ok(worker=worker) diff --git a/admiral-worker/core/usecases/logging/Log_worker_job_status.py b/admiral-worker/core/usecases/logging/Log_worker_job_status.py new file mode 100644 index 0000000..ab48f2d --- /dev/null +++ b/admiral-worker/core/usecases/logging/Log_worker_job_status.py @@ -0,0 +1,22 @@ +import logging +from dataclasses import dataclass + +from core.domain.worker.WorkerJob import WorkerJob +from core.repos.WorkerJobStatusRepo import WorkerJobStatusRepo +from core.services.SystemService import SystemService +from core.types.Id import Id + +log = logging.getLogger(__name__) + + +@dataclass +class Log_worker_job_status: + workerJobStatusRepo: WorkerJobStatusRepo + systemService: SystemService + + def now(self, id: Id[WorkerJob]): + self.workerJobStatusRepo.post( + workerJobId=id, + ramTaken=self.systemService.getProcessRam(), + cpuUtilization=self.systemService.getProcessCpu(), + ) diff --git a/admiral-worker/core/usecases/logging/Log_worker_status.py b/admiral-worker/core/usecases/logging/Log_worker_status.py new file mode 100644 index 0000000..97315f5 --- /dev/null +++ b/admiral-worker/core/usecases/logging/Log_worker_status.py @@ -0,0 +1,33 @@ +import logging +import time +from dataclasses import dataclass + +from core.domain.worker.Worker import Worker +from core.domain.worker.WorkerStatus import WorkerStatus +from core.repos.WorkerRepo import WorkerRepo +from core.repos.WorkerStatusRepo import WorkerStatusRepo +from core.services.SystemService import SystemService +from core.types.Id import Id + +log = logging.getLogger(__name__) + + +@dataclass +class Log_worker_status: + workerRepo: WorkerRepo + workerStatusRepo: WorkerStatusRepo + systemService: SystemService + + def now(self, id: Id[Worker]): + log.debug("Check if worker exists and is registered") + + worker = self.workerRepo.get(id=id) + + while True: + log.debug("Log worker status") + self.workerStatusRepo.post( + workerId=worker.id, + ramAvailable=self.systemService.getRamMbAvailable(), + cpuUtilization=self.systemService.getCpuUtilization() + ) + time.sleep(1) diff --git a/admiral-worker/core/usecases/optimization_worker/Run_optimization_job.py b/admiral-worker/core/usecases/optimization_worker/Run_optimization_job.py new file mode 100644 index 0000000..620713c --- /dev/null +++ b/admiral-worker/core/usecases/optimization_worker/Run_optimization_job.py @@ -0,0 +1,356 @@ +import logging +import os +import traceback +from dataclasses import dataclass, field +from datetime import datetime, timedelta 
+from inspect import getframeinfo, stack +from pathlib import Path +from typing import Optional, override + +from core.Utils import percentage +from core.domain.map.RouteInfo import RouteInfo +from core.domain.optimization.Optimization import Optimization +from core.domain.optimization.OptimizationFiles import OptimizationFiles +from core.domain.optimization.OptimizationMetrics import OptimizationMetrics +from core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationPointType import OptimizationPointType +from core.domain.optimization.OptimizationResult import OptimizationResult +from core.domain.optimization.OptimizationResultData import OptimizationResultData +from core.domain.optimization.OptimizationRoute import OptimizationRoute +from core.domain.optimization.OptimizationSolution import OptimizationSolution +from core.domain.optimization.OptimizationState import OptimizationState +from core.domain.optimization.TransportMode import TransportMode +from core.domain.worker.WorkerJob import WorkerJob +from core.domain.worker.WorkerLogLevel import WorkerLogLevel +from core.repos.OptimizationMetricsRepo import OptimizationMetricsRepo +from core.repos.OptimizationRepo import OptimizationRepo +from core.repos.OptimizationResultRepo import OptimizationResultRepo +from core.repos.OptimizationVehicleRepo import OptimizationVehicleRepo +from core.repos.PostOfficeRepo import PostOfficeRepo +from core.repos.WorkerJobLogRepo import WorkerJobLogRepo +from core.repos.WorkerLogRepo import WorkerLogRepo +from core.types.Logger import Logger +from core.services.OptimizationService import OptimizationService +from core.services.RoutingService import RoutingService +from core.services.SystemService import SystemService +from core.types.Id import Id +from core.usecases.logging.Log_worker_job_status import Log_worker_job_status +from core.usecases.optimization_worker.Read_optimization_files import Read_optimization_files +from core.usecases.optimization_worker.Update_optimization_points import Update_optimization_points + +logger = logging.getLogger(__name__) + + +@dataclass +class Run_optimization_job: + workerLogRepo: WorkerLogRepo + workerJobLogRepo: WorkerJobLogRepo + log_worker_job_status: Log_worker_job_status + read_optimization_files: Read_optimization_files + update_optimization_points: Update_optimization_points + optimizationVehicleRepo: OptimizationVehicleRepo + optimizationRepo: OptimizationRepo + postOfficeRepo: PostOfficeRepo + optimizationService: OptimizationService + optimizationResultRepo: OptimizationResultRepo + routingService: RoutingService + systemService: SystemService + optimizationMetricsRepo: OptimizationMetricsRepo + + @dataclass + class TerminationCallback: + ctx: 'Run_optimization_job' + optimization: Optimization + pid: int + log: Logger + + def __call__(self): + optimizationState = self.ctx.optimizationRepo.get(id=self.optimization.id).state + terminate = optimizationState != OptimizationState.ACTIVE + if terminate: + self.log.info(f"Optimization terminated: PID={self.pid}") + self.ctx.systemService.killProcess(pid=self.pid) + return False + + @dataclass + class WorkerLogger(Logger): + ctx: 'Run_optimization_job' + workerJob: WorkerJob + workerLogRepo: WorkerLogRepo + workerJobLogRepo: WorkerJobLogRepo + + def log(self, level: int, workerLogLevel: WorkerLogLevel, data): + dataStr = str(data) + caller = getframeinfo(stack()[2][0]) + fileName = Path(caller.filename) + context = f"{fileName.name}:{caller.lineno}" + 
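+ # Mirror every message to the local process logger and to both the worker-level and job-level log repositories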
logger.log(level, dataStr, stacklevel=3) + self.workerLogRepo.post(context=context, workerId=self.workerJob.workerId, data=dataStr, level=workerLogLevel) + self.workerJobLogRepo.post(context=context, workerJobId=self.workerJob.id, data=dataStr, level=workerLogLevel) + + @override + def info(self, data): + self.log(logging.INFO, WorkerLogLevel.INFO, data) + + @override + def warning(self, data): + self.log(logging.WARNING, WorkerLogLevel.WARN, data) + + @override + def error(self, data): + self.log(logging.ERROR, WorkerLogLevel.ERROR, data) + + @dataclass + class SolutionCallback: + + ctx: 'Run_optimization_job' + workerJob: WorkerJob + optimization: Optimization + optimizationFiles: OptimizationFiles + log_worker_job_status: Log_worker_job_status + + log: Logger + + optimizationResultId: Id[OptimizationResult] = Id.field() + lastSolutionProcessedAt: datetime = field(default_factory=datetime.now) + lastObjective: Optional[int] = field(default=None) + currentNumberOfDistricts: int = field(default=None) + solutionCount: int = field(default=0) + locked: bool = field(default=False) + crn_optimizationPointIndex: dict[int, int] = field(default_factory=lambda: {}) + + def __post_init__(self): + self.log.info("Mapping optimization points") + for i, op in enumerate(self.optimizationFiles.optimizationPoints): + self.crn_optimizationPointIndex[op.crnPoint.hisa] = i + + def __call__(self, objective: int, optimizationSolution: list[OptimizationSolution], finished: bool, unvisited: list[OptimizationPoint], + overlapping: Optional[dict[int, float]]): + try: + self.solutionCount += 1 + if self.lastObjective is None: + self.lastObjective = objective + if self.currentNumberOfDistricts is None: + self.currentNumberOfDistricts = len(optimizationSolution) + + diff = (datetime.now() - self.lastSolutionProcessedAt) + + # If number of district changed SAVE with PRIORITY! + if self.currentNumberOfDistricts != len(optimizationSolution): + self.currentNumberOfDistricts = len(optimizationSolution) + + # Save first optimization that has changed + self.log.info(f"Solution has changed number of districts: {self.currentNumberOfDistricts}") + self.log.info(f"Solution is accepted: {self.solutionCount}. age={diff}, cost={objective}, diff={objective - self.lastObjective}") + + self.optimizationResultId = Id() # Make new optimization result (do not override it with the old one) + self.save(objective=objective, optimizationSolution=optimizationSolution, finished=finished, unvisited=unvisited, + overlapping=overlapping) + self.lastObjective = objective + + self.optimizationResultId = Id() # Make new optimization result (do not override it with the old one) + self.save(objective=objective, optimizationSolution=optimizationSolution, finished=finished, unvisited=unvisited, + overlapping=overlapping) + self.lastObjective = objective + + self.log.info("Solution callback ready to accept new solutions") + self.lastSolutionProcessedAt = datetime.now() + return + + if self.locked: + self.log.info("Callback is locked") + return + + # Callback is not locked so lets lock it. + self.locked = True + + # If optimization is not first nor final then terminate saving the solution if its too early to save it. + if not finished and self.solutionCount > 1: + # If solution age is less than minute or there is no solution improvement then ignore solution + if diff < timedelta(minutes=1) or objective > self.lastObjective: + # Execute return release the lock + self.locked = False + return + + self.log.info(f"Solution is accepted: {self.solutionCount}. 
age={diff}, cost={objective}, diff={objective - self.lastObjective}")
+                self.save(objective=objective, optimizationSolution=optimizationSolution, finished=finished, unvisited=unvisited,
+                          overlapping=overlapping)
+                self.lastObjective = objective
+                self.log.info("Solution callback ready to accept new solutions")
+                self.lastSolutionProcessedAt = datetime.now()
+
+            except Exception:
+                self.log.error(traceback.format_exc())
+                try:
+                    self.ctx.optimizationRepo.setState(id=self.optimization.id, toState=OptimizationState.FAILED)
+                except Exception:
+                    pass
+
+            self.locked = False
+
+        def save(self, objective: int, optimizationSolution: list[OptimizationSolution], finished: bool, unvisited: list[OptimizationPoint],
+                 overlapping: Optional[dict[int, float]]):
+            optimizationResult = OptimizationResult(
+                id=self.optimizationResultId, optimizationId=self.optimization.id, routes=[],
+                info=f"{'Finished' if finished else 'Unfinished'} optimization result v{self.solutionCount} d{len(optimizationSolution)}",
+                authorizedByUserId=self.optimization.authorizedByUserId, parent=None,
+                unvisited=unvisited)
+
+            optimizationMetrics = OptimizationMetrics(
+                optimizationId=self.optimization.id, solution=self.solutionCount,
+                vehicles=len(optimizationSolution),
+                cost=objective, duration=0, distance=0, createdAt=datetime.now(), overlapping=overlapping)
+
+            for routeI, route in enumerate(optimizationSolution):
+                # Manually inject the depot as the first point in the route!
+                route.hise.insert(0, route.hise[-1])
+
+                vehicle = self.ctx.optimizationVehicleRepo.get(route.optimizationVehicleId)
+
+                # Find the legs of the optimization result
+                routePoints = [self.optimizationFiles.optimizationPoints[self.crn_optimizationPointIndex[hisa]] for hisa in route.hise]
+
+                try:
+                    routeInfo = self.ctx.routingService.getRouteInfo(
+                        transportMode=vehicle.type,
+                        legs=[point.crnPoint.microLocation for point in routePoints]
+                    )
+                except Exception:
+                    self.log.error(traceback.format_exc())
+                    return
+
+                try:
+                    averageRouteInfo = self.ctx.routingService.getAverageRouteInfo(
+                        transportMode=vehicle.type,
+                        legs=[point.crnPoint.microLocation for point in routePoints],
+                        probability=[point.visitFrequency for point in routePoints],
+                        iterations=10
+                    )
+                except Exception:
+                    self.log.error(traceback.format_exc())
+                    return
+
+                serviceTimeFreqSum = sum([(rp.serviceTime.total_seconds() * rp.visitFrequency) for rp in routePoints])
+
+                if finished:
+                    matrixRouteDuration = timedelta(seconds=0)
+                    matrixRouteDistance = 0
+                    for i in range(len(route.hise) - 1):
+                        startIndex = self.crn_optimizationPointIndex[route.hise[i]]
+                        endIndex = self.crn_optimizationPointIndex[route.hise[i + 1]]
+                        matrixRouteDuration += timedelta(
+                            seconds=float(self.optimizationFiles.routeMatrices[vehicle.type].durations[startIndex][endIndex]) / 1000
+                        )
+                        matrixRouteDistance += self.optimizationFiles.routeMatrices[vehicle.type].distances[startIndex][endIndex]
+
+                # Transform
+                optimizationRoute = OptimizationRoute(
+                    name=route.district if route.district is not None else f"O{routeI + 1}",
+                    isExtra=route.isExtra,
+                    index=routeI,
+                    cost=route.cost,
+
+                    # THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!!
+                    distance=routeInfo.distance,
+                    averageDistance=averageRouteInfo.distance,
+
+                    # THIS MUST BE IN SYNC WITH Run_optimization_job.save WHERE OPTIMIZATION ROUTE IS CALCULATED!!! 
+                    duration=int(routeInfo.duration + serviceTimeFreqSum),
+                    averageDuration=int(averageRouteInfo.duration + serviceTimeFreqSum),
+
+                    vehicle=vehicle,
+                    points=routePoints,
+                    steps=routeInfo.steps,
+
+                )
+
+                optimizationRoute.setHash()
+                optimizationResult.routes.append(optimizationRoute)
+
+                optimizationMetrics.distance += optimizationRoute.distance
+                optimizationMetrics.duration += optimizationRoute.duration
+
+            # Sort optimization routes by name and fix their indexes
+            optimizationResult.routes.sort(key=lambda r: r.name)
+            for i, route in enumerate(optimizationResult.routes):
+                route.index = i
+
+            # Save the optimization result and metrics to the database
+            self.ctx.optimizationResultRepo.post(optimizationResult=optimizationResult)
+            self.ctx.optimizationMetricsRepo.post(optimizationMetrics=optimizationMetrics)
+            self.log.info("Optimization solution successfully saved")
+
+            if finished:
+                self.log.info("Set optimization state to completed")
+                self.ctx.workerLogRepo.post(context=__name__, workerId=self.workerJob.workerId,
+                                            data=f"Saving optimization status: {self.optimization.posta}", level=WorkerLogLevel.INFO)
+                self.ctx.optimizationRepo.setState(id=self.optimization.id, toState=OptimizationState.COMPLETED)
+
+                self.log.info("End info logging")
+                self.ctx.workerJobLogRepo.post(context=__name__, workerJobId=self.workerJob.id, data=f"Finished worker job: {self.workerJob.id}",
+                                               level=WorkerLogLevel.INFO)
+                self.ctx.workerLogRepo.post(context=__name__, workerId=self.workerJob.workerId,
+                                            data=f"Optimization finished: {self.optimization.posta}", level=WorkerLogLevel.INFO)
+
+                self.log.info("Optimization successfully finished")
+
+    def now(self, workerJob: WorkerJob):
+        optimization: Optional[Optimization] = None
+        log = self.WorkerLogger(ctx=self, workerJob=workerJob, workerLogRepo=self.workerLogRepo, workerJobLogRepo=self.workerJobLogRepo)
+
+        try:
+            log.warning(f"Start new optimization worker job: {workerJob}")
+
+            log.info("Get all optimization information")
+            optimization: Optimization = self.optimizationRepo.get(id=workerJob.optimizationId)
+            optimizationVehicles = self.optimizationVehicleRepo.getAllByOptimizationId(optimizationId=workerJob.optimizationId)
+            log.info(f"Found optimization: {optimization}")
+
+            log.info("Get post office")
+            postOffice = self.postOfficeRepo.get(posta=optimization.posta.value)
+            log.info(f"Found post office: {postOffice}")
+
+            log.info("Reading optimization files")
+            optimizationFiles = self.read_optimization_files.now(posta=postOffice.posta)
+
+            latestConfOptResultData = None
+            if optimization.parent is not None:
+                latestConfOptResult = self.optimizationResultRepo.getLatestByOptimizationId(optimizationId=optimization.parent)
+                latestConfOptResultData = OptimizationResultData(optimization=optimization, optimizationResult=latestConfOptResult)
+                log.info(f"Got latest confirmed optimization result: {latestConfOptResult.info}")
+                log.info("Filter only CRN points from the latest confirmed optimization result")
+                for i, route in enumerate(latestConfOptResult.routes):
+                    routesCrnPoints = list(filter(lambda op: op.type == OptimizationPointType.CRN, route.points))
+                    log.info(f"{i}. 
Route crn points[{len(routesCrnPoints)}/{len(route.points)}]: {percentage(routesCrnPoints, route.points)}%") + route.points = routesCrnPoints + + log.info("Update optimization files") + optimizationFiles.optimizationPoints = self.update_optimization_points.now( + optimization=optimization, optimizationPoints=optimizationFiles.optimizationPoints) + + log.info("Create solution callback") + solutionCallback = self.SolutionCallback(ctx=self, workerJob=workerJob, optimization=optimization, optimizationFiles=optimizationFiles, + log_worker_job_status=self.log_worker_job_status, log=log) + + log.info("Create termination callback") + terminationCallback = self.TerminationCallback(ctx=self, optimization=optimization, pid=os.getpid(), log=log) + + log.info("Start VRP optimization") + self.optimizationService.vrpOptimization( + optimization=optimization, + optimizationVehicles=optimizationVehicles, + optimizationPoints=optimizationFiles.optimizationPoints, + routeMatrices=optimizationFiles.routeMatrices, + solutionCallback=solutionCallback, + terminationCallback=terminationCallback, + log=log, + initialOptimizationResultData=latestConfOptResultData, + ) + + log.info("Finish VRP optimization") + + except BaseException as err: + if optimization is not None: + self.optimizationRepo.setState(id=optimization.id, toState=OptimizationState.FAILED) + log.error(traceback.format_exc()) + raise err diff --git a/admiral-worker/docker-compose.yaml b/admiral-worker/docker-compose.yaml new file mode 100644 index 0000000..01b7a63 --- /dev/null +++ b/admiral-worker/docker-compose.yaml @@ -0,0 +1,15 @@ +services: + app: + build: . + env_file: + - .env + postgres: + image: postgres:14-alpine + ports: + - 5432:5432 + volumes: + - ~/apps/postgres:/var/lib/postgresql/data + environment: + - POSTGRES_PASSWORD=Solvesall123 + - POSTGRES_USER=sa + - POSTGRES_DB=master diff --git a/admiral-worker/requirements.txt b/admiral-worker/requirements.txt new file mode 100644 index 0000000..5f66910 --- /dev/null +++ b/admiral-worker/requirements.txt @@ -0,0 +1,28 @@ +# MakeFile +coverage + +# Shared +python-dotenv + +# Core + +# App Services +pydantic +pandas +pypolyline +sqlmodel +pyproj +requests +psutil +numpy==1.26.0 +scikit-learn +ortools +sortedcontainers + +# App Repoositories +SQLAlchemy +sqlmodel +pyodbc + +# Visualization +folium diff --git a/admiral-worker/tests/__init__.py b/admiral-worker/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_core/__init__.py b/admiral-worker/tests/test_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_core/test_fs.py b/admiral-worker/tests/test_core/test_fs.py new file mode 100644 index 0000000..e9512bf --- /dev/null +++ b/admiral-worker/tests/test_core/test_fs.py @@ -0,0 +1,10 @@ +import unittest + +from core.extend import fs + + +class test_fs(unittest.TestCase): + + def test_getPath(self): + path = str(fs.getPath(__file__, "test", "hello")) + self.assertTrue(path.endswith("/admiral-worker/tests/test_core/test/hello")) diff --git a/admiral-worker/tests/test_e2e/test_routingEngine/test_main.py b/admiral-worker/tests/test_e2e/test_routingEngine/test_main.py new file mode 100644 index 0000000..7100c85 --- /dev/null +++ b/admiral-worker/tests/test_e2e/test_routingEngine/test_main.py @@ -0,0 +1,56 @@ +import inspect +import logging +import unittest + +import folium +from folium import TileLayer + +from app.App import App +from core.domain.map.GeoLocation import GeoLocation +from 
core.domain.optimization.TransportMode import TransportMode +from core.extend import fs + +log = logging.getLogger(__name__) + + +class Test_main(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.dataDir = fs.getPath(__file__, "test_data") + App.init() + + @classmethod + def __drawRouteInfo(cls, transportMode: TransportMode, legs: list[GeoLocation], testName: str): + output = cls.dataDir.joinpath(f"{testName}.html") + routeInfo = App.services.routingService.getRouteInfo(transportMode=TransportMode.CAR, legs=legs) + print(routeInfo) + + latSum = 0 + lonSum = 0 + + for g in routeInfo.steps: + latSum += g.lat + lonSum += g.lon + + folium_map = folium.Map(location=[latSum / len(routeInfo.steps), lonSum / len(routeInfo.steps)], zoom_start=18, max_zoom=30, + tiles=TileLayer(max_zoom=30, max_native_zoom=30, name="OpenStreetMap")) + + folium.PolyLine(locations=[(step.lat, step.lon) for step in routeInfo.steps], color="blue", weight=2.5, opacity=0.8).add_to(folium_map) + + folium_map.save(output) + + def test_000(self): + testName = inspect.currentframe().f_code.co_name + + legs=[ + GeoLocation(lat=46.56157837316846, lon=15.036126073183372), + GeoLocation(lat=46.55359554022851, lon=15.030295168115506) + ] + + self.__drawRouteInfo(transportMode=TransportMode.CAR, legs=legs, testName=testName + "_car") + self.__drawRouteInfo(transportMode=TransportMode.EV, legs=legs, testName=testName + "_ev") + self.__drawRouteInfo(transportMode=TransportMode.MK, legs=legs, testName=testName + "_mk") + + +if __name__ == '__main__': + unittest.main() diff --git a/admiral-worker/tests/test_repos/__init__.py b/admiral-worker/tests/test_repos/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_repos/test_OptimizationPostmanRepo.py b/admiral-worker/tests/test_repos/test_OptimizationPostmanRepo.py new file mode 100644 index 0000000..72f8057 --- /dev/null +++ b/admiral-worker/tests/test_repos/test_OptimizationPostmanRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_OptimizationPostmanRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + optimizationPostmans = App.repos.optimizationPostmanRepo.getAllConfirmed() + self.assertGreater(len(optimizationPostmans), 0) + + optimization = App.repos.optimizationPostmanRepo.get(posta=optimizationPostmans[0].id) + self.assertIn(optimization, optimizationPostmans) diff --git a/admiral-worker/tests/test_repos/test_OptimizationRepo.py b/admiral-worker/tests/test_repos/test_OptimizationRepo.py new file mode 100644 index 0000000..b3af0fa --- /dev/null +++ b/admiral-worker/tests/test_repos/test_OptimizationRepo.py @@ -0,0 +1,35 @@ +import unittest + +from app.App import App +from core.domain.optimization.OptimizationState import OptimizationState + + +class test_OptimizationRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + optimizations = App.repos.optimizationRepo.getAll() + self.assertGreater(len(optimizations), 0) + + optimization = App.repos.optimizationRepo.get(id=optimizations[0].id) + self.assertIn(optimization, optimizations) + + def test_getWithState(self): + for opti in App.repos.optimizationRepo.getAll(): + print(opti.id, opti.state) + App.repos.optimizationRepo.setState(opti.id, toState=OptimizationState.COMPLETED) + print("=====================") + for opti in App.repos.optimizationRepo.getAll(): + print(opti.id, opti.state) + + + def test_updateFirst(self): + opti = 
App.repos.optimizationRepo.updateFirst(fromState=OptimizationState.CREATED, toState=OptimizationState.ACTIVE) + print(opti) + + def test_getLatest(self): + opt = App.repos.optimizationRepo.getLatestConfirmedByPosta(posta = 9710) + print(opt) diff --git a/admiral-worker/tests/test_repos/test_OptimizationResultRepo.py b/admiral-worker/tests/test_repos/test_OptimizationResultRepo.py new file mode 100644 index 0000000..592e6cf --- /dev/null +++ b/admiral-worker/tests/test_repos/test_OptimizationResultRepo.py @@ -0,0 +1,15 @@ +import unittest + +from app.App import App + + +class test_OptimizationResultRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_getLatest(self): + optL = App.repos.optimizationRepo.getLatestConfirmedByPosta(posta = 9710) + opt = App.repos.optimizationResultRepo.getLatestByOptimizationId(optimizationId=optL.id) + print(opt.id) diff --git a/admiral-worker/tests/test_repos/test_OptimizationVehicleRepo.py b/admiral-worker/tests/test_repos/test_OptimizationVehicleRepo.py new file mode 100644 index 0000000..43ee028 --- /dev/null +++ b/admiral-worker/tests/test_repos/test_OptimizationVehicleRepo.py @@ -0,0 +1,18 @@ +import unittest + +from app.App import App + + +class test_OptimizationVehicleRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + optimizationVehicles = App.repos.optimizationVehicleRepo.getAll() + print(optimizationVehicles) + self.assertGreater(len(optimizationVehicles), 0) + + optimization = App.repos.optimizationVehicleRepo.get(id=optimizationVehicles[0].id) + self.assertIn(optimization, optimizationVehicles) diff --git a/admiral-worker/tests/test_repos/test_WorkerJobLogRepo.py b/admiral-worker/tests/test_repos/test_WorkerJobLogRepo.py new file mode 100644 index 0000000..6ead44b --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerJobLogRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerJobLogRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workerJobLogs = App.repos.workerJobLogRepo.getAll() + self.assertGreater(len(workerJobLogs), 0) + + workerJobLog = App.repos.workerJobLogRepo.get(id=workerJobLogs[0].id) + self.assertIn(workerJobLog, workerJobLogs) diff --git a/admiral-worker/tests/test_repos/test_WorkerJobRepo.py b/admiral-worker/tests/test_repos/test_WorkerJobRepo.py new file mode 100644 index 0000000..7410ced --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerJobRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerJobRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workerJobs = App.repos.workerJobRepo.getAll() + self.assertGreater(len(workerJobs), 0) + + workerJob = App.repos.workerJobRepo.get(id=workerJobs[0].id) + self.assertIn(workerJob, workerJobs) diff --git a/admiral-worker/tests/test_repos/test_WorkerJobStatusRepo.py b/admiral-worker/tests/test_repos/test_WorkerJobStatusRepo.py new file mode 100644 index 0000000..105caec --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerJobStatusRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerJobStatusRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workerJobStatuses = App.repos.workerJobStatusRepo.getAll() + self.assertGreater(len(workerJobStatuses), 0) + + workerJobStatus = 
App.repos.workerJobStatusRepo.get(id=workerJobStatuses[0].id) + self.assertIn(workerJobStatus, workerJobStatuses) diff --git a/admiral-worker/tests/test_repos/test_WorkerLogRepo.py b/admiral-worker/tests/test_repos/test_WorkerLogRepo.py new file mode 100644 index 0000000..9363ebf --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerLogRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerLogRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workerLogs = App.repos.workerLogRepo.getAll() + self.assertGreater(len(workerLogs), 0) + + workerLog = App.repos.workerLogRepo.get(id=workerLogs[0].id) + self.assertIn(workerLog, workerLogs) diff --git a/admiral-worker/tests/test_repos/test_WorkerRepo.py b/admiral-worker/tests/test_repos/test_WorkerRepo.py new file mode 100644 index 0000000..91d4868 --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workers = App.repos.workerRepo.getAll() + self.assertGreater(len(workers), 0) + + worker = App.repos.workerRepo.get(id=workers[0].id) + self.assertIn(worker, workers) diff --git a/admiral-worker/tests/test_repos/test_WorkerStatusRepo.py b/admiral-worker/tests/test_repos/test_WorkerStatusRepo.py new file mode 100644 index 0000000..77aa1fb --- /dev/null +++ b/admiral-worker/tests/test_repos/test_WorkerStatusRepo.py @@ -0,0 +1,17 @@ +import unittest + +from app.App import App + + +class test_WorkerStatusRepo(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_all(self): + workerStatuses = App.repos.workerStatusRepo.getAll() + self.assertGreater(len(workerStatuses), 0) + + workerStatus = App.repos.workerStatusRepo.get(id=workerStatuses[0].id) + self.assertIn(workerStatus, workerStatuses) diff --git a/admiral-worker/tests/test_services/.gitkeep b/admiral-worker/tests/test_services/.gitkeep new file mode 100644 index 0000000..057956d --- /dev/null +++ b/admiral-worker/tests/test_services/.gitkeep @@ -0,0 +1 @@ +This is ftp file diff --git a/admiral-worker/tests/test_services/__init__.py b/admiral-worker/tests/test_services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_services/test_FtpService.py b/admiral-worker/tests/test_services/test_FtpService.py new file mode 100644 index 0000000..299c6ac --- /dev/null +++ b/admiral-worker/tests/test_services/test_FtpService.py @@ -0,0 +1,18 @@ +import unittest +from pathlib import Path + +from app.App import App + + +class test_FtpService(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_download(self): + App.services.ftpService.download(path=Path(".gitkeep")) + + def test_download2(self): + for d in App.services.ftpService.scan(): + print(d.name.endswith()) diff --git a/admiral-worker/tests/test_services/test_OptimizationService.py b/admiral-worker/tests/test_services/test_OptimizationService.py new file mode 100644 index 0000000..d60e8f2 --- /dev/null +++ b/admiral-worker/tests/test_services/test_OptimizationService.py @@ -0,0 +1,108 @@ +import pickle +import random +import unittest +import uuid +from datetime import timedelta + +from app.App import App +from core.domain.map.GeoLocation import GeoLocation +from core.domain.optimization.TransportMode import TransportMode +from 
core.domain.optimization.OptimizationPoint import OptimizationPoint +from core.domain.optimization.OptimizationPointType import OptimizationPointType +from core.domain.optimization.OptimizationPostman import OptimizationPostman +from core.domain.optimization.OptimizationVehicle import OptimizationVehicle +from core.domain.map.PostOffice import PostOffice +from core.extend import fs +from core.types.Id import Id + + +class test_OptimizationService(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_vrpOptimization_original(self): + optimizationPointsNum = 50 + optimizationId = Id() + + random.seed(a=0) + + randomVehicleType = lambda num: [x for x in list(random.sample(sorted(list(TransportMode.__members__.values())), num))] + + optimizationPostman = [ + OptimizationPostman( + optimizationId=optimizationId, + deliveryTime=random.uniform(a=4.0, b=8.0), + vehicleTypes=randomVehicleType(num=random.randint(1, 3)), + quantity=random.randint(1, 5) + ) for _ in range(5) + ] + print("\nPostmans:") + for x in optimizationPostman: + print(f"\t- {x}") + + optimizationVehicles = [ + OptimizationVehicle( + optimizationId=optimizationId, + name="name", + category=randomVehicleType(1)[0], + capacity=random.randint(10, 50), + range=random.randint(50, 100), + minQuantity=0, + maxQuantity=3, + deliverySpeed=12, + travelingSpeed=12, + navigatingSpeed=12, + ) for _ in range(10) + ] + print("\nVehicles:") + for x in optimizationVehicles: + print(f"\t- {x}") + + optimizationPoints = [OptimizationPoint( + id=str(uuid.uuid4()), + address="", + location=GeoLocation(0, 0), + polygon=[], + type=OptimizationPointType.DEPOT, + serviceTime=timedelta(seconds=0), + demand=0, + stopTimes=[] + )] + [OptimizationPoint( + id=str(uuid.uuid4()), + address="", + location=GeoLocation(0, 0), + polygon=[], + type=OptimizationPointType.CRN, + serviceTime=timedelta(seconds=1), + demand=1, + stopTimes=[] + ) for _ in range(optimizationPointsNum)] + print("\nPoints:") + for x in optimizationPoints: + print(f"\t- {x}") + + distanceMatrix: dict[TransportMode, list[list[int]]] = {} + durationMatrix: dict[TransportMode, list[list[int]]] = {} + for k, v in TransportMode.__members__.items(): + distanceMatrix[v] = [[random.randint(1, 2) for _ in range(len(optimizationPoints) + 10)] for _ in range(len(optimizationPoints) + 10)] + durationMatrix[v] = [[random.randint(1, 2) for _ in range(len(optimizationPoints) + 10)] for _ in range(len(optimizationPoints) + 10)] + for i in range(optimizationPointsNum): + distanceMatrix[v][i][i] = 0 + durationMatrix[v][i][i] = 0 + + optimizationRoutes = App.services.optimizationService.vrpOptimization( + postOffice=PostOffice(postalCode="1000", name="name", location="location", address="address", unitType="CRN"), + solvingTime=timedelta(seconds=20), + optimizationPostman=optimizationPostman, + optimizationVehicles=optimizationVehicles, + optimizationPoints=optimizationPoints[:100], + distanceMatrix=distanceMatrix, + durationMatrix=durationMatrix + ) + print(optimizationRoutes) + + def test_readOptimizationFiles(self): + files = App.usecases.read_optimization_files.now(posta="1000") + print(files) diff --git a/admiral-worker/tests/test_services/test_RoutingService.py b/admiral-worker/tests/test_services/test_RoutingService.py new file mode 100644 index 0000000..8001163 --- /dev/null +++ b/admiral-worker/tests/test_services/test_RoutingService.py @@ -0,0 +1,113 @@ +import datetime +import random +import unittest + +from app.App import App +from core.domain.map.GeoLocation import 
GeoLocation +from core.domain.optimization.TransportMode import TransportMode + + +class test_RoutingService(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_speed(self): + starts = [GeoLocation(lat=46.0399789 + random.uniform(-0.1, 0.1), lon=14.5076505 + random.uniform(-0.1, 0.1)) for i in range(100)] + ends = [GeoLocation(lat=46.1399789 + random.uniform(-0.1, 0.1), lon=14.6076505 + random.uniform(-0.1, 0.1)) for i in range(100)] + start = datetime.datetime.now() + routeInfos = App.services.routingService.getRouteInfoMatrix(transportMode=TransportMode.WALK, starts=starts, ends=ends, accurate=True) + end = datetime.datetime.now() + diff = (end - start).total_seconds() + print(diff, len(routeInfos)) + + def test_routeInfo_walk(self): + start = GeoLocation(lat=46.0399789, lon=14.5076505) + end = GeoLocation(lat=46.0423508, lon=14.509163) + matrix = App.services.routingService.getRouteInfoMatrix( + transportMode=TransportMode.WALK, + starts=[start], + ends=[end], + accurate=True + ) + print(matrix) + + def test_routing_steps(self): + start = GeoLocation(46.093579794174296, 14.27906301507719) + end = GeoLocation(46.06541666237525, 14.314559238921543) + polyline = App.services.routingService._getPolyline(transportMode=TransportMode.CAR, legs=[start, end]) + routeInfo = App.services.routingService.getRouteInfo(transportMode=TransportMode.CAR, legs=[start, end]) + # + # steps2 = App.services.routingService.getRouteMatrix( + # transportMode=TransportMode.CAR, + # crnPoints=[start], + # ) + + print(polyline) + print(routeInfo.duration / 60,routeInfo) + # print(steps2) + + def test_distance_matrix(self): + crnPoints = App.services.postaService.getCrnPoints(posta=1355)[:10] + routeMatrix = App.services.routingService.getRouteMatrix( + crnPoints=crnPoints, + transportMode=TransportMode.CAR + ) + routeInfo = App.services.routingService.getRouteInfo( + legs=[crn.location for crn in crnPoints], + transportMode=TransportMode.CAR + ) + print(routeMatrix.data) + print(routeInfo) + + + def test_matrix(self): + crns = App.services.postaService.getCrnPoints(posta=1355) + posta = App.repos.postOfficeRepo.get(posta = 1355) + crns.insert(0, posta.getCrnPoint()) + + locations = [] + for i in [0, 12, 418]: + print(f"Index: {i}: {crns[i]}") + + locations.append(crns[i].location) + + for i in [0, 12, 418]: + for j in [0, 12, 418]: + crnStart = crns[i] + crnEnd = crns[j] + routeInfo = App.services.routingService.getRouteInfo(transportMode=TransportMode.CAR, legs=[crnStart.location, crnEnd.location]) + print(f"Start house: {crnStart.hisa}, end house: {crnEnd.hisa}, distance={routeInfo.distance}, duration={routeInfo.duration/60}") + + routeMatrix = App.services.routingService.getRouteMatrix(starts=locations, ends=locations, transportMode=TransportMode.CAR) + + print("Durations [min]") + print(routeMatrix.durations / 60) + print("Distances [m]") + print(routeMatrix.distances) + + def test_matrix2(self): + crns = App.services.postaService.getCrnPoints(posta=1355) + posta = App.repos.postOfficeRepo.get(posta = 1355) + crns.insert(0, posta.getCrnPoint()) + + locations = [] + for i in [0, 12, 418]: + print(f"Index: {i}: {crns[i]}") + locations.append(crns[i]) + + routeMatrix = App.services.routingService.getRouteMatrix(crnPoints=locations, transportMode=TransportMode.CAR) + print(routeMatrix.data.dtypes) + print(routeMatrix.data.to_string()) + + def test_matrix_route(self): + start = GeoLocation(46.039868240098436, 14.527908242928524) + end = GeoLocation(46.04046411845911, 
14.52994784714586) + # matrix = App.services.routingService.getRouteMatrix(geoLocations=[start, end], transportMode=TransportMode.CAR) + # route = App.services.routingService.getRouteInfo(transportMode=TransportMode.CAR, legs=[start, end]) + route = App.services.routingService._getPolyline(transportMode=TransportMode.CAR, legs=[start, end]) + print(route) + + # print(matrix) + # print(route.duration,route.distance) diff --git a/admiral-worker/tests/test_services/test_SystemService.py b/admiral-worker/tests/test_services/test_SystemService.py new file mode 100644 index 0000000..8bddf84 --- /dev/null +++ b/admiral-worker/tests/test_services/test_SystemService.py @@ -0,0 +1,25 @@ +import unittest + +from app.App import App + + +class test_SystemService(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_getIp(self): + ip = App.services.systemService.getIp() + self.assertGreater(len(ip), 8) + self.assertEqual(ip.count('.'), 3) + + def test_getRam(self): + ram = App.services.systemService.getRamMbAvailable() + self.assertGreater(ram, 0) + self.assertLess(ram, 1000000) + + def test_getCpu(self): + cpu = App.services.systemService.getCpuUtilization() + self.assertGreater(cpu, 0) + self.assertLess(cpu, 100) diff --git a/admiral-worker/tests/test_usecases/__init__.py b/admiral-worker/tests/test_usecases/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_usecases/test_Run_updating_worker.py b/admiral-worker/tests/test_usecases/test_Run_updating_worker.py new file mode 100644 index 0000000..b2a1c99 --- /dev/null +++ b/admiral-worker/tests/test_usecases/test_Run_updating_worker.py @@ -0,0 +1,13 @@ +import unittest + +from app.App import App + + +class test_Run_updating_worker(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_now(self): + App.usecases.run_updating_worker.now() diff --git a/admiral-worker/tests/test_usecases/test_initialization/__init__.py b/admiral-worker/tests/test_usecases/test_initialization/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_usecases/test_initialization/test_Register_worker.py b/admiral-worker/tests/test_usecases/test_initialization/test_Register_worker.py new file mode 100644 index 0000000..4f61a84 --- /dev/null +++ b/admiral-worker/tests/test_usecases/test_initialization/test_Register_worker.py @@ -0,0 +1,23 @@ +import unittest + +from app.App import App +from core.usecases.initialization.Register_worker import Register_worker + + +class test_Register_worker(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_now(self): + ip = App.services.systemService.getIp() + + App.repos.workerRepo.deleteByIp(ip=ip) + result = App.usecases.register_worker.now() + + match type(result): + case Register_worker.Result.WorkerAlreadyExists: + raise Exception("Should not be already created!") + case Register_worker.Result.Ok: + pass diff --git a/admiral-worker/tests/test_usecases/test_jobs/__init__.py b/admiral-worker/tests/test_usecases/test_jobs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_usecases/test_jobs/test_Run_worker_optimization_job.py b/admiral-worker/tests/test_usecases/test_jobs/test_Run_worker_optimization_job.py new file mode 100644 index 0000000..01c50bd --- /dev/null +++ b/admiral-worker/tests/test_usecases/test_jobs/test_Run_worker_optimization_job.py @@ -0,0 +1,14 @@ +import time +import unittest + +from app.App import App + + 
+class test_Run_worker_optimization_job(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_now(self): + App.usecases.init_run_worker_optimization_job().now() diff --git a/admiral-worker/tests/test_usecases/test_logs/__init__.py b/admiral-worker/tests/test_usecases/test_logs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/admiral-worker/tests/test_usecases/test_logs/test_Log_worker_status.py b/admiral-worker/tests/test_usecases/test_logs/test_Log_worker_status.py new file mode 100644 index 0000000..a3156a4 --- /dev/null +++ b/admiral-worker/tests/test_usecases/test_logs/test_Log_worker_status.py @@ -0,0 +1,27 @@ +import datetime +import unittest + +from app.App import App +from core.usecases.logging.Log_worker_status import Log_worker_status + + +class test_Log_worker_status(unittest.TestCase): + + @classmethod + def setUpClass(cls): + App.init() + + def test_now(self): + now = int(datetime.datetime.now().timestamp()) - 1 + + App.usecases.register_worker.now() + result = App.usecases.log_worker_status.now() + match type(result): + case Log_worker_status.Result.WorkerNotFound: + raise Exception("Should not be already created!") + case Log_worker_status.Result.Ok: + pass + + self.assertGreater(result.workerStatus.cpuUtilization, 0) + self.assertGreater(result.workerStatus.ramAvailable, 0) + self.assertGreater(result.workerStatus.created_at, now)