diff --git a/.gitignore b/.gitignore index 21a0af6..8f66f74 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ *.pyc staticfiles +eoy-build diff --git a/README.md b/README.md index f4dcff1..b9fb697 100644 --- a/README.md +++ b/README.md @@ -30,8 +30,9 @@ The process of getting this thing up and running is currently a bit tedious, but we'll live with that for now. ## Database -Expects presence of PostgreSQL (9.4+) / PostGIS (2.1+). As a privileged user run -[db/init.sql](db/init.sql). This will create a database schema called `gtfs`, +Expects presence of PostgreSQL (10+) / PostGIS (2.4+). As a privileged user run +[resources/db/init.sql](src/resources/db/init.sql). +This will create a database schema called `gtfs`, a few tables into it (`gtfs.agency`, `gtfs.calendar`, `gtfs.routes`, `gtfs.shapes`, `gtfs.stop_times`, `gtfs.stops`, `gtfs.trips`) and three functions for dealing with location calculation (`gtfs.get_current_impeded_time`, @@ -41,41 +42,71 @@ last function goes to [rcoup](http://gis.stackexchange.com/users/564/rcoup)'s this function will not be necessary anymore and `st_split(geometry, geometry)` can be used instead. +**NOTE:** Tested on PostgreSQL 14 / PostGIS 3.2 and seems to be running +fine (@tkardi, 18.05.2022) + **NB! Before running the sql file, please read carefully what it does. A sane mind should not run whatever things in a database ;)** Once the database tables and functions have been set up, data can be inserted. -## web API -But still, before data can be loaded to the database, Django (2.2 is the -current LTS version), Django Rest Framework ja Django Rest Framework GIS -should be installed. We need Django for data loading as we'll use Django's -db connection factory. - -You can simply `pip` them +## Configuration +Configuration is loaded in the following order: +- [resources/global.params.json](/src/resources/global.params.json): this should +contain all app specific settings, regardless of the env we're running in. 
+- [resources/environment/${APP_ENV}.params.json](/src/resources/environment/dev.params.json): +should contain all environment specific configuration (like db connection params). +The file will selected based on the available `APP_ENV` environment variable +value (case does not matter), and will default to `DEV` if not set. So if you +call your environment `THIS-IS-IT`, then be sure to have a file called +`resources/environment/this-is-it.params.json` present aswell. +- Override parameters should be mounted to `/main/app/resources/override` path. +Expected filename is `params.json`. + +Missing any of these files will not raise an exception during configuration +loading but may hurt afterwards when a specific value that is needed is not +found. + +## Docker +The Flask app maybe run manually in terminal but the least-dependency-hell-way +seems to be via docker (official latest python:3 image). In the project root +(assuming your database connection is correctly configured in +[resources/environment/dev.params.json](/src/resources/dev.params.json)): ``` -$ pip install django==2.2 -$ pip install djangorestframework -$ pip install pip install djangorestframework-gis +$ source build.sh + [..] +Successfully tagged localhost/eoy:latest +$ ``` -or simply use the [`requirements.txt`](api/requirements.txt) because there are -some other things required aswell. -## Loading data +## Load data The configuration that is necessary for loading the data is described in -[api/conf/settings.py](api/conf/settings.py). To start the loading procedure -you need to run [api/sync/datasync.py](api/sync/datasync.py) +[Configuration](#configuration). To start the loading procedure +you need to run [tools/datasync.py](src/tools/datasync.py) -`$ python datasync.py` +``` +$ docker run -it --rm --network=host -e APP_ENV=DEV --name eoy localhost/eoy:latest python /main/app/tools/datasync.py + [..] 
+postprocess done +$ +``` -And after the loading has finished, again, as a privileged user run -[db/preprocess.sql](db/preprocess.sql). Then we can fire up Django's -development server with +## web API +Is based on Flask (Used to be Django, but not any more). -`$ python manage.py runserver` +``` +$ docker run -it --rm --network=host -e APP_ENV=DEV --name eoy localhost/eoy:latest +* Serving Flask app 'server' (lazy loading) +* Environment: production + WARNING: This is a development server. Do not use it in a production deployment. + Use a production WSGI server instead. +* Debug mode: off +* Running on http://127.0.0.1:5000 (Press CTRL+C to quit) + [..] +``` -Point your browser to http://127.0.0.1:8000?format=json and you should see a +Point your browser to http://127.0.0.1:5000/ and you should see a response: `{"message":"Nobody expects the spanish inquisition!"}` @@ -84,12 +115,12 @@ response: HTTP GET queries ### Current locations -http://127.0.0.1:8000/current/locations?format=json +http://127.0.0.1:5000/current/locations/ Returns currently active vehicles and their locations together with data on previous and next stops, and routes. ### Current trips -http://127.0.0.1:8000/current/trips?format=json +http://127.0.0.1:5000/current/trips/ Returns currently active trips as linestrings from the first stop of the trip to the last. diff --git a/api/api/settings.py b/api/api/settings.py deleted file mode 100644 index e273cd5..0000000 --- a/api/api/settings.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Django settings for api project. - -Generated by 'django-admin startproject' using Django 1.8.11. - -For more information on this file, see -https://docs.djangoproject.com/en/1.8/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/1.8/ref/settings/ -""" - -# Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
-import os -from conf.settings import DEBUG, SECRET_KEY, ALLOWED_HOSTS, DB_USER, DB_PASSWORD -from conf.settings import DB_NAME, SYNC_USER, SYNC_PASSWORD, DB_HOST -from conf.settings import INSTALLED_APPS_X - -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ - -# Application definition - -INSTALLED_APPS = ( - #'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django.contrib.gis', - 'rest_framework', - 'rest_framework_gis', - 'eoy' -) - -INSTALLED_APPS = INSTALLED_APPS + INSTALLED_APPS_X - -MIDDLEWARE_CLASSES = ( - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'django.middleware.security.SecurityMiddleware', -) - -ROOT_URLCONF = 'api.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -WSGI_APPLICATION = 'api.wsgi.application' - - -# Database -# https://docs.djangoproject.com/en/1.8/ref/settings/#databases - -DATABASES = { - 'default': { - 'NAME': DB_NAME, - 'ENGINE': 'django.contrib.gis.db.backends.postgis', - 'HOST': DB_HOST, - 'USER': DB_USER, - 'PASSWORD': DB_PASSWORD, - 'PORT': '5432' - }, - 'sync': { - 
'NAME': DB_NAME, - 'ENGINE': 'django.contrib.gis.db.backends.postgis', - 'HOST': DB_HOST, - 'USER': SYNC_USER, - 'PASSWORD': SYNC_PASSWORD, - 'PORT': '5432' - } -} - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ - os.path.join(BASE_DIR, 'templates'), - ], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -REST_FRAMEWORK = { - 'DEFAULT_RENDERER_CLASSES': [ - 'rest_framework.renderers.TemplateHTMLRenderer', - 'rest_framework.renderers.JSONRenderer', -# 'rest_framework.renderers.BrowsableAPIRenderer', - ] -} - -#LOGGING = { -# 'version': 1, -# 'disable_existing_loggers': False, -# 'handlers': { -# 'console': { -# 'class': 'logging.StreamHandler', -# }, -# }, -# 'loggers': { -# 'django.db.backends': { -# 'level': 'DEBUG', -# 'handlers': ['console', ], -# }, -# } -#} - - -# Internationalization -# https://docs.djangoproject.com/en/1.8/topics/i18n/ - -LANGUAGE_CODE = 'et-EE' - -TIME_ZONE = 'UTC' - -USE_I18N = True - -USE_L10N = True - -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/1.8/howto/static-files/ - -STATIC_URL = '/static/' -STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') diff --git a/api/api/urls.py b/api/api/urls.py deleted file mode 100644 index 2a2e420..0000000 --- a/api/api/urls.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -"""api URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/1.8/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. 
Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') -Including another URLconf - 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) -""" -from django.conf.urls import url -from django.urls import include, path -from django.contrib import admin -from rest_framework.urlpatterns import format_suffix_patterns -from conf.settings import INSTALLED_APPS_X - -from eoy import views - -urlpatterns = [ - #url(r'^admin/', include(admin.site.urls)), - url(r'^$', views.index, name='home'), - url( - r'^current/locations/$', views.LocTableAsList.as_view(), - name='locations-list'), - url( - r'^current/trips/$', views.index, - name='trips-list'), - url( - r'^current/flightradar/$', views.flightradar, - name='flights-list'), - url( - r'^current/traingps/$', views.traingps, - name='trains-list'), -] - -## add some extra urls -for app in INSTALLED_APPS_X: - urlpatterns.append( - path('', include('%s.urls' % app)), - ) - -urlpatterns = format_suffix_patterns(urlpatterns, allowed=['json', 'html']) diff --git a/api/api/wsgi.py b/api/api/wsgi.py deleted file mode 100644 index f0ac3a7..0000000 --- a/api/api/wsgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -WSGI config for api project. - -It exposes the WSGI callable as a module-level variable named ``application``. 
- -For more information on this file, see -https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings") - -application = get_wsgi_application() diff --git a/api/eoy/flaskapp.py b/api/eoy/flaskapp.py deleted file mode 100644 index d2de739..0000000 --- a/api/eoy/flaskapp.py +++ /dev/null @@ -1,26 +0,0 @@ -import json -from flask import Flask -from flask import Response - -from proxy import flightradar -from proxy import traingps - - - -app = Flask(__name__) - -@app.route('/flightradar') -def flightradar_get(): - return Response( - response=json.dumps(flightradar.get_flight_radar_data()), - status=200, - mimetype='application/json' - ) - -@app.route('/traingps') -def traingps_get(): - return Response( - response=json.dumps(traingps.get_train_gps_data()), - status=200, - mimetype='application/json' - ) diff --git a/api/eoy/models.py b/api/eoy/models.py deleted file mode 100644 index 0d96d10..0000000 --- a/api/eoy/models.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -from django.contrib.gis.db import models - -class Stops(models.Model): - stop_id = models.IntegerField(primary_key=True) - stop_name = models.CharField(max_length=250) - stop_lat = models.DecimalField(max_digits=12, decimal_places=9) - stop_lon = models.DecimalField(max_digits=12, decimal_places=9) - - class Meta: - managed = False - db_table = 'gtfs\".\"stops' - -class CurrentLocations(models.Model): - trip_id = models.IntegerField( - primary_key=True) - shape_id = models.IntegerField() - trip_start_time = models.CharField( - max_length=8, db_column='trip_start') - trip_end_time = models.CharField( - max_length=8, db_column='trip_fin') - current_time = models.CharField( - max_length=8, db_column='current_time') - #prevstop_id = models.ForeignKey( - # 'Stops', related_name='prevstop', db_column='prev_stop_id') - prevstop_name = models.CharField( - 
max_length=250, db_column='prev_stop') - prevstop_depart = models.CharField( - max_length=8, db_column='prev_stop_time') - #prevstop_seq = models.IntegerField() - #nextstop_id = models.ForeignKey( - # 'Stops', related_name='nextstop', db_column='next_stop_id') - nextstop_name = models.CharField( - max_length=250, db_column='next_stop') - nextstop_arrive = models.CharField( - max_length=8, db_column='next_stop_time') - #nextstop_seq = models.IntegerField() - trip_headsign = models.CharField(max_length=250) - trip_long_name = models.CharField(max_length=250) - route_short_name = models.CharField(max_length=100) - route_long_name = models.CharField(max_length=255) - route_color = models.CharField(max_length=10) - location = models.PointField(srid=4326, db_column='pos') - - class Meta: - managed = False - db_table = 'gtfs\".\"loctable_v2' - - -#class CurrentTrips(models.Model): -# -# class Meta: -# managed = False -# db_table = 'gtfs\".\"calctrips' diff --git a/api/eoy/serializers.py b/api/eoy/serializers.py deleted file mode 100644 index 82ed06f..0000000 --- a/api/eoy/serializers.py +++ /dev/null @@ -1,15 +0,0 @@ -from rest_framework_gis.serializers import GeoFeatureModelSerializer - -from eoy.models import CurrentLocations - -class LocationTableSerializer(GeoFeatureModelSerializer): - class Meta: - fields = ['trip_id', 'shape_id', 'trip_start_time', - 'trip_end_time', 'current_time', - 'prevstop_name', 'prevstop_depart', - 'nextstop_name', 'nextstop_arrive', 'trip_headsign', - 'trip_long_name', 'route_short_name', 'route_long_name', - 'route_color'] - model = CurrentLocations - lookup_field = 'trip_id' - geo_field = 'location' diff --git a/api/eoy/templatetags/__init__.py b/api/eoy/templatetags/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/api/eoy/templatetags/eoy_extras.py b/api/eoy/templatetags/eoy_extras.py deleted file mode 120000 index 7607d7a..0000000 --- a/api/eoy/templatetags/eoy_extras.py +++ /dev/null @@ -1 +0,0 @@ 
-../../templatetags/extra_templatetags.py \ No newline at end of file diff --git a/api/eoy/tests.py b/api/eoy/tests.py deleted file mode 100644 index 7ce503c..0000000 --- a/api/eoy/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/api/eoy/views.py b/api/eoy/views.py deleted file mode 100644 index 9fe3a1a..0000000 --- a/api/eoy/views.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -from rest_framework import generics -from rest_framework.decorators import api_view -from rest_framework.response import Response - -from eoy.models import CurrentLocations -from eoy import serializers - -from eoy.proxy import flightradar as f -from eoy.proxy import traingps as t - -# Create your views here. -@api_view(('GET', )) -def index(request, *args, **kwargs): - """Yes-yes-yes,... I'm up and running.""" - return Response({"message":"Nobody expects the spanish inquisition!"}) - -class LocTableAsList(generics.ListAPIView): - model = CurrentLocations - queryset = model.objects.all() - serializer_class = serializers.LocationTableSerializer - - def get_fields_for_model(self): - return self.model._meta.get_fields() - - def get(self, request, *args, **kwargs): - if request.accepted_renderer.format == 'html': - data = { - 'data' : self.get_queryset(), - 'fields': dict((field.name, field.get_internal_type()) for field in self.get_fields_for_model()) - } - return Response(data, template_name='list_rows.html') - return super(LocTableAsList, self).get(request, *args, **kwargs) - -@api_view(('GET', )) -def flightradar(request, *args, **kwargs): - return Response(f.get_flight_radar_data()) - -@api_view(('GET', )) -def traingps(request, *args, **kwargs): - return Response(t.get_train_gps_data()) diff --git a/api/manage.py b/api/manage.py deleted file mode 100644 index 8023a0a..0000000 --- a/api/manage.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python -import os -import sys - -if __name__ == "__main__": - 
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings") - - from django.core.management import execute_from_command_line - - execute_from_command_line(sys.argv) diff --git a/api/requirements.txt b/api/requirements.txt deleted file mode 100644 index a2fb32a..0000000 --- a/api/requirements.txt +++ /dev/null @@ -1,27 +0,0 @@ -backcall==0.1.0 -certifi==2021.10.8 -chardet==3.0.4 -charset-normalizer==2.0.7 -decorator==4.4.0 -Django==2.2.24 -djangorestframework==3.11.2 -djangorestframework-gis==0.14 -idna==3.3 -ipython==7.8.0 -ipython-genutils==0.2.0 -jedi==0.15.1 -parso==0.5.1 -pexpect==4.7.0 -pickleshare==0.7.5 -prompt-toolkit==2.0.9 -psycopg2-binary==2.8.3 -ptyprocess==0.6.0 -Pygments==2.7.4 -pytz==2019.2 -requests==2.26.0 -six==1.12.0 -sqlparse==0.3.0 -traitlets==4.3.2 -unicodecsv==0.14.1 -urllib3==1.26.7 -wcwidth==0.1.7 diff --git a/api/sync/__init__.py b/api/sync/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/api/sync/preprocess-all.sql b/api/sync/preprocess-all.sql deleted file mode 100644 index 1cc5877..0000000 --- a/api/sync/preprocess-all.sql +++ /dev/null @@ -1,203 +0,0 @@ -drop view if exists gtfs.loctable_v2 -; -drop table if exists gtfs.calcshapes -; -drop table if exists gtfs.calcstopnodes -; -drop table if exists gtfs.calctrips -; -create table gtfs.calcshapes as -select - shape_id, st_makeline(array_agg(shape)) as shape, - st_collect(shape) as nodes -from ( - select - s.shape_id, st_setsrid(st_makepoint(s.shape_pt_lon, s.shape_pt_lat), 4326) as shape - from - gtfs.shapes s - order by s.shape_id, s.shape_pt_sequence) n -group by shape_id -order by shape_id; -alter table gtfs.calcshapes - add constraint pk__calcshapes primary key (shape_id); -create index sidx__calcshapes - on gtfs.calcshapes using gist (shape); -create index sidx__calcshapes_nodes - on gtfs.calcshapes using gist (shape); -create table gtfs.calcstopnodes as -select - shape_id, st_multi(st_collect(stop_node)) as stop_nodes, - trip_id, array_agg(stop_seq) as 
stop_seq -from ( - select - s.shape_id, - st_closestpoint(s.nodes, st_setsrid( - st_point(stops.stop_lon, stops.stop_lat), 4326)) as stop_node, - t.trip_id, st.stop_sequence as stop_seq - from - gtfs.calcshapes s, - gtfs.stop_times st, - gtfs.trips t, - gtfs.stops stops - where - st.stop_id = stops.stop_id and - st.trip_id = t.trip_id and - s.shape_id = t.shape_id - order by s.shape_id, t.trip_id, st.stop_sequence) m -group by shape_id, trip_id -; -alter table gtfs.calcstopnodes - add constraint pk__calcstopnodes primary key (trip_id) -; -create index sidx__calcstopnodes - on gtfs.calcstopnodes using gist (stop_nodes) -; -create table gtfs.calctrips as -with - splits as ( - select - cs.shape_id, sn.trip_id, - sn.stop_seq, - (st_dump(split_line_multipoint(cs.shape, sn.stop_nodes))).* - from - gtfs.calcshapes cs, - gtfs.calcstopnodes sn - where - cs.shape_id = sn.shape_id - ), - inbetween as ( - select - splits.shape_id, splits.trip_id, - splits.stop_seq[splits.path[1]] as from_stop, - splits.stop_seq[splits.path[1]+1] as to_stop, - splits.geom as shape - from splits - ), - triptimes as ( - select - trip_id, min(departure_time) as trip_start, - max(arrival_time) as trip_fin - from gtfs.stop_times - group by trip_id - ) -select - inbetween.shape_id, inbetween.trip_id, inbetween.shape, - triptimes.trip_start, triptimes.trip_fin, - inbetween.from_stop as from_stop_seq, - from_stops.stop_id as from_stop_id, - from_stops.departure_time as from_stop_time, - inbetween.to_stop as to_stop_seq, - to_stops.stop_id as to_stop_id, - to_stops.arrival_time as to_stop_time -from - inbetween, - gtfs.stop_times from_stops, - gtfs.stop_times to_stops, - triptimes -where - inbetween.trip_id = from_stops.trip_id and - inbetween.trip_id = to_stops.trip_id and - inbetween.from_stop = from_stops.stop_sequence and - inbetween.to_stop = to_stops.stop_sequence and - triptimes.trip_id = inbetween.trip_id -order by inbetween.trip_id, inbetween.from_stop -; -create index sidx__calctrips - on 
gtfs.calctrips using gist (shape) -; -create or replace view gtfs.loctable_v2 as -with - curtime as ( - select - clock_timestamp()::date AS cd, - to_char(clock_timestamp(), 'hh24:mi:ss'::text) AS ct, - date_part('dow'::text, clock_timestamp()) + 1 AS d, - lpad((to_char(clock_timestamp(), 'hh24')::int + 24)::varchar,2,'0')||':'||to_char(clock_timestamp(), 'mi:ss') as plushours - ), - cal as ( - select - c.service_id - from - gtfs.calendar c, - curtime - where - curtime.cd >= c.start_date and - curtime.cd <= c.end_date and - (array[ - c.monday, - c.tuesday, - c.wednesday, - c.thursday, - c.friday, - c.saturday, - c.sunday])[curtime.d] = true - ), - startstop as ( - select - calctrips.trip_id, calctrips.from_stop_time as leg_start, - calctrips.to_stop_time as leg_fin, calctrips.from_stop_id, - calctrips.to_stop_id, calctrips.from_stop_seq, - calctrips.to_stop_seq, calctrips.shape, - calctrips.trip_start, calctrips.trip_fin - from - gtfs.calctrips, curtime - where - ( - curtime.ct >= calctrips.from_stop_time::text and - curtime.ct <= calctrips.to_stop_time::text - ) or ( - curtime.plushours >= calctrips.from_stop_time::text and - curtime.plushours <= calctrips.to_stop_time::text - ) - ), - trip as ( - select - startstop.trip_id, trips.shape_id, - startstop.trip_start, startstop.trip_fin, - startstop.leg_start, startstop.leg_fin, - curtime.ct as cur, trips.trip_headsign, - trips.trip_long_name, routes.route_short_name, - routes.route_long_name, routes.route_color, - startstop.from_stop_id, startstop.to_stop_id, - startstop.from_stop_seq, startstop.to_stop_seq, - startstop.shape - from - cal, curtime, startstop, gtfs.trips trips, gtfs.routes routes - where - trips.trip_id = startstop.trip_id and - trips.service_id = cal.service_id and - trips.route_id::text = routes.route_id::text - ) -select - trip.trip_id, trip.shape_id, trip.trip_start, trip.trip_fin, - trip.trip_headsign, trip.trip_long_name, trip.route_short_name, - trip.route_long_name, - tostop.stop_id as 
next_stop_id, - tostop.stop_name as next_stop, - trip.leg_fin as next_stop_time, - fromstop.stop_id as prev_stop_id, - fromstop.stop_name as prev_stop, - trip.leg_start as prev_stop_time, - curtime.ct as current_time, - '#'::text || trip.route_color::text as route_color, - /* @tkardi 09.11.2021 st_flipcoordinates to quickly get API geojson - coors order correct. FIXME: should be a django version gdal version thing. - */ - st_flipcoordinates( - st_lineinterpolatepoint( - trip.shape, - gtfs.get_time_fraction( - trip.leg_start, - trip.leg_fin, - gtfs.get_current_impeded_time( - trip.leg_start, - trip.leg_fin, - trip.cur::character varying - ) - ) - ) - ) as pos -from curtime, trip - left join gtfs.stops tostop on trip.to_stop_id = tostop.stop_id - left join gtfs.stops fromstop on trip.from_stop_id = fromstop.stop_id -; diff --git a/api/templates/list_rows.html b/api/templates/list_rows.html deleted file mode 100644 index c563a4c..0000000 --- a/api/templates/list_rows.html +++ /dev/null @@ -1,162 +0,0 @@ -{% load staticfiles %} -{% load i18n %} -{% load rest_framework %} -{% load eoy_extras %} - - - - - {% block head %} - - {% block meta %} - - - {% endblock %} - - {% block title %}{% if title %}{{ title }} – {% endif %}this is a webpage{% endblock %} - - {% block style %} - {% block bootstrap_theme %} - - - - {% endblock %} - - - - {% endblock %} - - {% endblock %} - - - - - {% block body %} - -

{{ title }}

- {% include "map.html" %} -
-
- - - {% if uri_field %}{% endif %} - {% for field_name, type in fields.items %} - {% if type not in 'GeometryField,PointField' %} - - {% endif %} - {% endfor %} - -
uri - {{ field_name }} -
-
-
- - {% for row in data %} - - {% if uri_field %}{% endif %} - {% for field_name, type in fields.items %} - {% if type not in 'GeometryField,PointField' %} - - {% endif %} - {% endfor %} - - {% endfor %} -
view - {% if type == 'DateTimeField' %} - {{ row|get_value:field_name|date:'Y-m-d H:i:s'}} - {% else %} - {{ row|get_value:field_name|default_if_none:" " }} - {% endif %} -
-
-
- {% block script %} - - - - - - - - {% endblock %} - - {% endblock %} - diff --git a/api/templates/map.html b/api/templates/map.html deleted file mode 100644 index 79638ef..0000000 --- a/api/templates/map.html +++ /dev/null @@ -1,173 +0,0 @@ -{% load static %} -{% load eoy_extras %} - - - - - - - - - -
- -
- - - diff --git a/api/templatetags/extra_templatetags.py b/api/templatetags/extra_templatetags.py deleted file mode 100644 index b185459..0000000 --- a/api/templatetags/extra_templatetags.py +++ /dev/null @@ -1,29 +0,0 @@ -from django import template - -register = template.Library() - -@register.filter -def get_value(obj, key): - return getattr(obj, key, None) - -@register.filter -def get_item(dictionary, key): - return dictionary.get(key) - -@register.filter -def get_uri(dictionary): - if hasattr(dictionary, 'get'): - return dictionary.get("@id") - -@register.simple_tag -def query_transform(request, **kwargs): - params = request.GET.copy() - params.update(kwargs) - return '?%s' % params.urlencode() if params else '' - -@register.filter -def to_geojson(obj, key): - geom = getattr(obj, key, None) - if geom != None and hasattr(geom, 'geojson'): - return geom.geojson - return None diff --git a/build.sh b/build.sh new file mode 100644 index 0000000..e3c17e8 --- /dev/null +++ b/build.sh @@ -0,0 +1,15 @@ +#!/bin/bash +p_CURDIR=${PWD} + +rm -rf ./eoy-build || true +mkdir ./eoy-build + +cp -r ./src ./eoy-build/app +cp -r ./requirements.txt ./eoy-build/requirements.txt +cp -r ./docker/** ./eoy-build + +cd eoy-build + +docker build --tag localhost/eoy -f Dockerfile . + +cd $p_CURDIR diff --git a/doc/foss4ge2017_kardi.html b/doc/foss4ge2017_kardi.html index f5581d3..76eb5ee 100644 --- a/doc/foss4ge2017_kardi.html +++ b/doc/foss4ge2017_kardi.html @@ -171,11 +171,11 @@

Estimating public transit "real-time" locations based on time-table data

Current locations API
- (Django + Django REST framework) + was: Django + Django REST framework | is currently: Flask
- API
- https://tkardi.ee/current/locations/?format=json + API
+ https://tkardi.ee/gtfs/current/locations/?format=json
Example dashboard
diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..85d5d9e --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3 + +WORKDIR /main + +ENV PYTHONPATH=/main + +COPY requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +CMD [ "python", "/main/app/server/server.py" ] diff --git a/example/current.html b/example/current.html index 327fb92..f81af48 100644 --- a/example/current.html +++ b/example/current.html @@ -73,29 +73,31 @@ "#8BB4C5": "Ferry" }, // vehicle locations layer - realtime = L.realtime('https://tkardi.ee/current/locations/?format=json', { + realtime = L.realtime('https://tkardi.ee/gtfs/current/locations/?format=json', { interval: 3 * 1000, getFeatureId: function(feature) { - return feature.id; + return feature.properties.trip_id; }, pointToLayer: function(feature, latlng) { + console.log(feature); var marker = L.marker(latlng, { icon: L.AwesomeMarkers.icon({ prefix: 'fa', icon: 'fa-bus', markerColor: 'black', - iconColor: feature.properties.route_color + iconColor: feature.properties.route_color.toLowerCase() }), riseOnHover: true }).bindTooltip( L.Util.template( - '{route_short_name}: {trip_long_name}.
Headsign: {trip_headsign}.
Next: {nextstop_name} @ {nextstop_arrive}', + '{route_short_name}: {trip_long_name}.
Headsign: {trip_headsign}.
Next: {next_stop} @ {next_stop_time}', feature.properties ) ); return marker; }, onEachFeature: function(feature, layer) { + //console.log(feature); layer.on({ click: function (e) { follow_layer_id = e.target.feature.id; @@ -106,7 +108,7 @@ feature = layer.feature; layer.setTooltipContent( L.Util.template( - '{route_short_name}: {trip_long_name}.
Headsign: {trip_headsign}.
Next: {nextstop_name} @ {nextstop_arrive}', + '{route_short_name}: {trip_long_name}.
Headsign: {trip_headsign}.
Next: {next_stop} @ {next_stop_time}', feature.properties ) ); diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..74d242a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,18 @@ +Brotli==1.0.9 +certifi==2021.10.8 +charset-normalizer==2.0.12 +click==8.1.3 +dynaconf==3.1.8 +Flask==2.1.2 +Flask-Compress==1.12 +flup6==1.1.1 +idna==3.3 +importlib-metadata==4.11.3 +itsdangerous==2.1.2 +Jinja2==3.1.2 +MarkupSafe==2.1.1 +psycopg2==2.9.3 +requests==2.27.1 +urllib3==1.26.9 +Werkzeug==2.1.2 +zipp==3.8.0 diff --git a/src/load_resources.py b/src/load_resources.py new file mode 100644 index 0000000..a16a28c --- /dev/null +++ b/src/load_resources.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +import os +from dynaconf import Dynaconf + +_path = os.path.dirname(__file__) +APP_ENV = os.environ.get('APP_ENV', 'dev').lower() + +def load_settings(): + # ... and load settings + global_settings_file = os.path.join(_path, 'resources', 'global.params.json') + env_settings_file = os.path.join(_path, 'resources', 'environment', '%s.params.json' % APP_ENV) + override_settings_file = os.path.join(_path, 'resources', 'override', 'params.json') + + # just for logging purposes check which params files seem to be present. 
+ [ + _check_settings_file(f) for f in [ + global_settings_file, + env_settings_file, + override_settings_file, + ] + ] + settings = Dynaconf( + settings_files=[ + global_settings_file, + env_settings_file, + override_settings_file + ] + ) + return settings + +def _check_settings_file(filepath): + if _file_exists_and_has_content(filepath): + #logger.debug('Using settings file from %s' % filepath) + return + #logger.info('Did not find settings file %s or file empty' % filepath) + +def _file_exists_and_has_content(filepath): + return os.path.exists(filepath) and os.path.getsize(filepath) > 0 + +settings = load_settings() diff --git a/db/init.sql b/src/resources/db/init.sql similarity index 87% rename from db/init.sql rename to src/resources/db/init.sql index ff37900..cfa2626 100644 --- a/db/init.sql +++ b/src/resources/db/init.sql @@ -1,9 +1,10 @@ -create schema gtfs authorization postgres; +create schema if not exists gtfs authorization postgres; comment on schema gtfs is 'Schema for GTFS data'; /* Tables */ -create table if not exists gtfs.agency ( +drop table if exists gtfs.agency; +create table gtfs.agency ( agency_id integer, agency_name character varying(250), agency_url character varying(250), @@ -12,12 +13,12 @@ create table if not exists gtfs.agency ( agency_lang character varying(3) ); -alter table gtfs.agency owner to postgres; alter table gtfs.agency add constraint pk__agency primary key (agency_id); -create table if not exists gtfs.calendar( +drop table if exists gtfs.calendar; +create table gtfs.calendar( service_id integer, monday boolean, tuesday boolean, @@ -29,12 +30,12 @@ create table if not exists gtfs.calendar( start_date date, end_date date ); -alter table gtfs.calendar owner to postgres; alter table gtfs.calendar add constraint pk__calendar primary key (service_id); -create table if not exists gtfs.routes( +drop table if exists gtfs.routes; +create table gtfs.routes( route_id character varying(32), agency_id integer, route_short_name character 
varying(100), @@ -43,25 +44,21 @@ create table if not exists gtfs.routes( route_color character varying(10), competent_authority character varying(100) ); -alter table gtfs.routes owner to postgres; alter table gtfs.routes add constraint pk__routes primary key (route_id); ---alter table gtfs.routes add constraint --- fk__routes__agency foreign key (agency_id) references gtfs.agency (agency_id) --- on update cascade on delete no action --- deferrable initially deferred; -create table if not exists gtfs.shapes( +drop table if exists gtfs.shapes; +create table gtfs.shapes( shape_id integer, shape_pt_lat numeric, shape_pt_lon numeric, shape_pt_sequence smallint ); -alter table gtfs.shapes owner to postgres; create unique index uidx__shapes on gtfs.shapes (shape_id, shape_pt_sequence); -create table if not exists gtfs.stops ( +drop table if exists gtfs.stops; +create table gtfs.stops ( stop_id integer, stop_code character varying(100), stop_name character varying(250), @@ -75,12 +72,11 @@ create table if not exists gtfs.stops ( lest_y numeric, zone_name character varying(250) ); -alter table gtfs.stops owner to postgres; alter table gtfs.stops add constraint pk__stops primary key (stop_id); - -create table if not exists gtfs.trips ( +drop table if exists gtfs.trips; +create table gtfs.trips ( route_id character varying(32), service_id integer, trip_id integer, @@ -90,10 +86,9 @@ create table if not exists gtfs.trips ( shape_id integer, wheelchair_accessible boolean ); -alter table gtfs.trips owner to postgres; - -create table if not exists gtfs.stop_times( +drop table if exists gtfs.stop_times; +create table gtfs.stop_times( trip_id integer, arrival_time character varying(8), departure_time character varying(8), @@ -102,12 +97,6 @@ create table if not exists gtfs.stop_times( pickup_type smallint, drop_off_type smallint ); -alter table gtfs.stop_times owner to postgres; ---alter table gtfs.stop_times add constraint --- fk__stop_times__stops foreign key (stop_id) 
references gtfs.stops (stop_id) --- on update cascade on delete no action --- deferrable initially deferred; - /* Functions */ @@ -120,11 +109,16 @@ declare a_cur int; a_strt int; a_fin int; + strt time; + fin interval; totalsecs numeric := 1; fractionsecs numeric := 0; begin + --a_fin := string_to_array(trip_fin, ':'); a_fin := extract(epoch from trip_fin::interval); + --a_strt := string_to_array(trip_start, ':'); a_strt := extract(epoch from trip_start::interval); + --a_cur := string_to_array(curtime, ':'); a_cur := extract(epoch from curtime::interval); if a_cur < a_strt then a_cur := a_cur + 24*60*60; @@ -133,14 +127,31 @@ begin fractionsecs := (a_cur::numeric-a_strt::numeric); totalsecs := (a_fin::numeric-a_strt::numeric); end if; --- raise notice 'Fraction %', fractionsecs; --- raise notice 'Total %', totalsecs; -return fractionsecs::numeric / totalsecs::numeric; + +/* if a_fin[1]::smallint >= 24 then + fin := ((a_fin[1]::smallint - 24)::varchar||':'||(a_fin)[2]||':'||(a_fin)[3])::time; + totalsecs := extract(epoch from (('24:00:00'::time - trip_start::time) + fin)); + else + totalsecs := extract(epoch from trip_fin::time - trip_start::time); + end if; + + if a_cur[1]::smallint < a_strt[1]::smallint then + fin := '24:00:00'::time - trip_start::time; + fractionsecs := extract(epoch from (curtime::time + fin)); + else + fractionsecs := extract(epoch from curtime::time - trip_start::time); + end if; + */ + --raise notice 'a_cur %', a_cur; + --raise notice 'a_strt %', a_strt; + --raise notice 'a_fin %', a_fin; + --raise notice 'Fraction %', fractionsecs; + --raise notice 'Total %', totalsecs; + return fractionsecs::numeric / totalsecs::numeric; end; $$ language plpgsql security invoker; -alter function gtfs.get_time_fraction(varchar, varchar, varchar) owner to postgres; comment on function gtfs.get_time_fraction(varchar, varchar, varchar) is 'Calculates the relative fraction that current time represents in between start and finish timestamps.'; @@ -249,7 +260,7 
@@ begin -- doing full speed ->> return whatever timespan we need to cover X := nxt - prv; M := acctime + stoptime; - dt := ((X - 2 * acctime)::numeric / (X - 2 * M)::numeric) * (cur - prv - M)::numeric; + dt := ((X - 2 * acctime)::numeric / (X - 2 * M)::numeric + 0.000001) * (cur - prv - M)::numeric; dt := prv + acctime + dt; end if; return (timestamp 'epoch' + dt * interval '1 second')::time::varchar; @@ -257,7 +268,6 @@ end; $$ language plpgsql security invoker; -alter function gtfs.get_current_impeded_time(varchar, varchar, varchar, integer, integer) owner to postgres; comment on function gtfs.get_current_impeded_time( varchar, varchar, varchar, integer, integer ) is 'Calculates current "impeded time" based on last and next stoptimes and current real time as described in https://github.com/tkardi/eoy/issues/2'; @@ -360,7 +370,5 @@ $BODY$ $BODY$ LANGUAGE plpgsql IMMUTABLE COST 100; -ALTER FUNCTION public.split_line_multipoint(geometry, geometry) - OWNER TO postgres; comment on function public.split_line_multipoint(geometry, geometry) is 'Function by http://gis.stackexchange.com/users/564/rcoup posted @ http://gis.stackexchange.com/a/112317'; diff --git a/db/preprocess.sql b/src/resources/db/preprocess.sql similarity index 90% rename from db/preprocess.sql rename to src/resources/db/preprocess.sql index 3887447..c67ed1c 100644 --- a/db/preprocess.sql +++ b/src/resources/db/preprocess.sql @@ -41,7 +41,6 @@ create index sidx__calcshapes on gtfs.calcshapes using gist (shape); create index sidx__calcshapes_nodes on gtfs.calcshapes using gist (shape); -alter table gtfs.calcshapes owner to postgres; /** table: gtfs:calcstopnodes @@ -76,7 +75,6 @@ alter table gtfs.calcstopnodes add constraint pk__calcstopnodes primary key (trip_id); create index sidx__calcstopnodes on gtfs.calcstopnodes using gist (stop_nodes); -alter table gtfs.calcstopnodes owner to postgres; /** table: gtfs.calctrips @@ -138,11 +136,8 @@ where triptimes.trip_id = inbetween.trip_id order by 
inbetween.trip_id, inbetween.from_stop; ---alter table gtfs.calctrips add constraint pk__calcstopnodes primary key (trip_id); create index sidx__calctrips on gtfs.calctrips using gist (shape); -alter table gtfs.calctrips owner to postgres; - /** view: gtfs.loctable_v2 * @@ -227,25 +222,18 @@ select trip.leg_start as prev_stop_time, curtime.ct as current_time, '#'::text || trip.route_color::text as route_color, - /* @tkardi 09.11.2021 st_flipcoordinates to quickly get API geojson - coors order correct. FIXME: should be a django version gdal version thing. - */ - st_flipcoordinates( - st_lineinterpolatepoint( - trip.shape, - gtfs.get_time_fraction( + st_lineinterpolatepoint( + trip.shape, + gtfs.get_time_fraction( + trip.leg_start, + trip.leg_fin, + gtfs.get_current_impeded_time( trip.leg_start, trip.leg_fin, - gtfs.get_current_impeded_time( - trip.leg_start, - trip.leg_fin, - trip.cur::character varying - ) + trip.cur::character varying ) ) ) as pos from curtime, trip left join gtfs.stops tostop on trip.to_stop_id = tostop.stop_id left join gtfs.stops fromstop on trip.from_stop_id = fromstop.stop_id; - -alter table gtfs.loctable_v2 owner to postgres; diff --git a/db/scrap/kiirendus.png b/src/resources/db/scrap/kiirendus.png similarity index 100% rename from db/scrap/kiirendus.png rename to src/resources/db/scrap/kiirendus.png diff --git a/db/scrap/qtests.sql b/src/resources/db/scrap/qtests.sql similarity index 100% rename from db/scrap/qtests.sql rename to src/resources/db/scrap/qtests.sql diff --git a/api/api/__init__.py b/src/resources/environment/.gitignore similarity index 100% rename from api/api/__init__.py rename to src/resources/environment/.gitignore diff --git a/src/resources/environment/dev.params.json b/src/resources/environment/dev.params.json new file mode 100644 index 0000000..7ed2c9c --- /dev/null +++ b/src/resources/environment/dev.params.json @@ -0,0 +1,8 @@ +{ + "DATABASE": { + "host": "localhost", + "database": "postgres", + "user": "postgres", + 
"password": "postgres" + } +} diff --git a/src/resources/global.params.json b/src/resources/global.params.json new file mode 100644 index 0000000..7b50138 --- /dev/null +++ b/src/resources/global.params.json @@ -0,0 +1,13 @@ +{ + "GTFS_DBTABLES": [ + "shapes", + "stop_times", + "trips", + "stops", + "routes", + "calendar", + "agency" + ], + "GTFS_DBSCHEMA" : "gtfs", + "GTFS_ZIPURL" : "http://www.peatus.ee/gtfs/gtfs.zip" +} diff --git a/api/eoy/__init__.py b/src/server/__init__.py similarity index 100% rename from api/eoy/__init__.py rename to src/server/__init__.py diff --git a/src/server/exceptions.py b/src/server/exceptions.py new file mode 100644 index 0000000..ebef990 --- /dev/null +++ b/src/server/exceptions.py @@ -0,0 +1,15 @@ + +class ToHTTPError(Exception): + status_code = 500 + + def __init__(self, message, status_code=None, payload=None): + Exception.__init__(self) + self.message = message + if status_code is not None: + self.status_code = status_code + self.payload = payload + + def to_dict(self): + rv = dict(self.payload or ()) + rv['message'] = self.message + return rv diff --git a/api/eoy/proxy.py b/src/server/flightradar.py similarity index 53% rename from api/eoy/proxy.py rename to src/server/flightradar.py index 3ca20f0..b20725f 100644 --- a/api/eoy/proxy.py +++ b/src/server/flightradar.py @@ -31,39 +31,3 @@ def _to_geojson(self, data): type='FeatureCollection', features=f ) - -class TrainGPS(object): - def __init__(self, *args, **kwargs): - self.url = 'http://elron.ee/api/v1/map' - - def _to_float(self, string): - try: - return float(string) - except: - return string - - def get_train_gps_data(self): - r = requests.get(self.url) - r.raise_for_status() - return self._to_geojson(r.json()) - - def _to_geojson(self, data): - f = [ - OrderedDict( - type='Feature', - id=ac['reis'], - geometry=OrderedDict(type='Point', coordinates=[ - self._to_float(ac.get('longitude', "0")), - self._to_float(ac.get('latitude', "0"))] - ), - 
properties=OrderedDict(ac.copy()) - ) for ac in data.get('data', []) - ] - - return dict( - type='FeatureCollection', - features=f - ) - -flightradar = FlightRadar() -traingps = TrainGPS() diff --git a/src/server/gtfs.py b/src/server/gtfs.py new file mode 100644 index 0000000..2263096 --- /dev/null +++ b/src/server/gtfs.py @@ -0,0 +1,54 @@ +import psycopg2 +import json + +from app.server.exceptions import ToHTTPError +from app.load_resources import settings + +SQL_TEMPLATE = """select row_to_json(f.*)::jsonb from (select jsonb_agg(st_asgeojson(z.*)::jsonb)::jsonb as "features", 'FeatureCollection' as "type" from %s z) f""" +SQL_GET_LOCS = """gtfs.loctable_v2""" +SQL_GET_TRIPS = """(select t.*, s.shape as geom from gtfs.trips t, gtfs.calcshapes s where exists (select 1 from gtfs.loctable_v2 l where l.trip_id = t.trip_id) and s.shape_id = t.shape_id)""" + + +class AbstractTableRequestHandler(object): + DATABASE_CONNECTION=None + def __init__(self): + self.partial_sql = None + + def get_data(self): + if not self.DATABASE_CONNECTION or self.DATABASE_CONNECTION.closed == 1: + try: + self.DATABASE_CONNECTION = psycopg2.connect(**settings.DATABASE) + except (Exception, psycopg2.Error) as error: + raise ToHTTPError( + message=f"cannot connect to database", + status_code=500 + ) + with self.DATABASE_CONNECTION.cursor() as cur: + sql = SQL_TEMPLATE % self.partial_sql + cur.execute(sql) + if not cur: + raise ToHTTPError( + message=f"SQL query failed: {sql}", + status_code=404 + ) + return cur.fetchone()[0] + + def serve_request(self): + try: + return json.dumps(self.get_data()) + except ToHTTPError: + raise + except Exception as e: + raise ToHTTPError( + message=str(e), + status_code=500 + ) + +class LocTableRequestHandler(AbstractTableRequestHandler): + def __init__(self): + self.partial_sql = SQL_GET_LOCS + + +class TripTableRequestHandler(AbstractTableRequestHandler): + def __init__(self): + self.partial_sql = SQL_GET_TRIPS diff --git a/src/server/server.py 
b/src/server/server.py new file mode 100644 index 0000000..63a9a45 --- /dev/null +++ b/src/server/server.py @@ -0,0 +1,65 @@ +import json +import os + +from flask import Flask, request, send_file, make_response +from flask import Response +from flask import jsonify +from flask_compress import Compress + +from app.server.gtfs import LocTableRequestHandler +from app.server.gtfs import TripTableRequestHandler +from app.server.flightradar import FlightRadar +from app.server.exceptions import ToHTTPError + +app = Flask(__name__) +Compress(app) + +app.config['COMPRESS_MIMETYPES'].append('application/json') + +@app.errorhandler(ToHTTPError) +def handle_tohttperror(error): + response = jsonify(error.to_dict()) + response.status_code = error.status_code + response.headers = {'Access-Control-Allow-Origin':'*'} + return response + +@app.route("/") +def root(): + return Response( + json.dumps({"message":"Nobody expects the Spanish inquisition!"}), + mimetype='application/json', + headers={ + 'Content-Encoding':'UTF-8' + } + ) + +@app.route('/current/locations/') +def loc_table_request(): + return Response( + LocTableRequestHandler().serve_request(), + mimetype='application/json', + headers={ + 'Content-Encoding':'UTF-8' + } + ) + +@app.route('/current/trips/') +def trip_table_request(): + return Response( + TripTableRequestHandler().serve_request(), + mimetype='application/json', + headers={ + 'Content-Encoding':'UTF-8' + } + ) + +@app.route('/current/flightradar/') +def flightradar_get(): + return Response( + response=json.dumps(FlightRadar().get_flight_radar_data()), + status=200, + mimetype='application/json' + ) + +if __name__ == '__main__': + app.run() diff --git a/api/eoy/migrations/__init__.py b/src/tools/__init__.py similarity index 100% rename from api/eoy/migrations/__init__.py rename to src/tools/__init__.py diff --git a/api/sync/datasync.py b/src/tools/datasync.py similarity index 61% rename from api/sync/datasync.py rename to src/tools/datasync.py index 
352c426..e946212 100644 --- a/api/sync/datasync.py +++ b/src/tools/datasync.py @@ -11,18 +11,9 @@ from io import StringIO import sys, os -sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -os.environ['DJANGO_SETTINGS_MODULE'] = 'api.settings' +from app.load_resources import settings -import django -django.setup() - - -from django.db import connections as CONNECTIONS -from conf.settings import DBTABLES, DBSCHEMA - -ZIPURL = 'http://www.peatus.ee/gtfs/gtfs.zip' -# FIXME: ZIPURL should be in conf.settings aswell! +_PATH = os.path.dirname(__file__) @@ -92,7 +83,7 @@ def _db_check_table(cursor, dbschema, dbtable): Returns a list with table's column names. """ - tab = '%s.%s' % (dbschema, dbtable) + tab = f'{dbschema}.{dbtable}' sql = "select array_agg(attname) from pg_attribute " \ "where attrelid=%s::regclass and not attisdropped and attnum > 0" params = (tab,) @@ -104,7 +95,7 @@ def _fs_check_csv(path, filename, ext='txt'): Returns a tuple of csv absolute filepath, and headers. """ - filename = '%s.%s' % (filename, ext) + filename = f'{filename}.{ext}' fp = os.path.join(path, filename) assert os.path.exists(fp) return fp, get_csv_header(fp) @@ -115,8 +106,7 @@ def _get_insert_cols(db_cols, fp_cols, dbschema, tablename): Use this to figure out which columns need to be read from the csv file. """ cols = list(set(db_cols).intersection(fp_cols)) - assert len(cols) > 0, "%s.%s and %s.csv do not share any columns" % ( - dbschema, dbtable, dbtable) + assert len(cols) > 0, f"{dbschema}.{tablename} and {tablename}.csv do not share any columns" return cols def _db_prepare_truncate(tableschema, tablename): @@ -125,9 +115,8 @@ def _db_prepare_truncate(tableschema, tablename): @FIXME: as this is prone to injection check whether the tablename
""" - sql = """truncate table %(sch)s.%(tab)s cascade""" - params = dict(sch=tableschema, tab=tablename) - return sql % params + sql = f"""truncate table {tableschema}.{tablename} cascade""" + return sql #{main @@ -138,7 +127,7 @@ def run(): # local # to_path = 'tmp' # the real thing - to_path = download_zip(ZIPURL, tempfile.mkdtemp(prefix='eoy_')) + to_path = download_zip(settings.GTFS_ZIPURL, tempfile.mkdtemp(prefix='eoy_')) print(to_path) # loop through required files and look for a matching table # in the database @@ -146,45 +135,48 @@ def run(): # if table not found, raise exception # if exception, then rollback and stop whatever was going on # all database commands run in a single transaction - c = CONNECTIONS['sync'] - with c.cursor() as cursor: - # loop through the list of tables specified at - # conf.settings.DBTABLES - for dbtable in DBTABLES: - # check if table exists in db and get it's columns - db_cols = _db_check_table(cursor, DBSCHEMA, dbtable) - # check if file present and get csv header - fp, fp_cols = _fs_check_csv(to_path, dbtable) - print ('%s.%s' %(DBSCHEMA, dbtable)) - # get intersection of db_cols and fp_cols (i.e cols that - # are present in both) - cols = _get_insert_cols(db_cols, fp_cols, DBSCHEMA, dbtable) - # truncate old data, - st_trunc = _db_prepare_truncate(DBSCHEMA, dbtable) - cursor.execute(st_trunc) - # and fill anew ... 
- with open(fp, encoding='utf-8') as f: - fcsv = FilteredCSVFile(f, fieldnames=cols, quotechar='"') - tab = '%s.%s' % (DBSCHEMA, dbtable) - cursor.copy_from(io.StringIO(fcsv.read()), tab, sep='\t', columns=cols) - print(cursor.rowcount) - print('done %s' % fp) + with psycopg2.connect(**settings.DATABASE) as connection: + with connection.cursor() as cursor: + cursor.execute(f'SET search_path={settings.GTFS_DBSCHEMA},"$user",public') + # loop through the list of tables specified at + # settings.GTFS_DBTABLES + for dbtable in settings.GTFS_DBTABLES: + # check if table exists in db and get it's columns + db_cols = _db_check_table(cursor, settings.GTFS_DBSCHEMA, dbtable) + # check if file present and get csv header + fp, fp_cols = _fs_check_csv(to_path, dbtable) + print (f'{settings.GTFS_DBSCHEMA}.{dbtable}') + # get intersection of db_cols and fp_cols (i.e cols that + # are present in both) + cols = _get_insert_cols(db_cols, fp_cols, settings.GTFS_DBSCHEMA, dbtable) + # truncate old data, + st_trunc = _db_prepare_truncate(settings.GTFS_DBSCHEMA, dbtable) + cursor.execute(st_trunc) + # and fill anew ... + with open(fp, encoding='utf-8') as f: + fcsv = FilteredCSVFile(f, fieldnames=cols, quotechar='"') + #tab = '%s.%s' % (settings.GTFS_DBSCHEMA, dbtable) + cursor.copy_from(io.StringIO(fcsv.read()), dbtable, sep='\t', columns=cols) + print(cursor.rowcount) + print(f'done {fp}') except: raise - # FIXME: This is the place for calling data prep functions in the database. - - # keep the file for now... 
- #shutil.rmtree(to_path) + shutil.rmtree(to_path) - def postprocess(): - with open('preprocess-all.sql') as f: +def postprocess(): + print("starting postprocess...") + with psycopg2.connect(**settings.DATABASE) as connection: + fp = os.path.join(os.path.dirname(_PATH), 'resources', 'db', 'preprocess.sql') + with open(fp) as f: statements = f.read() - c = CONNECTIONS['sync'] - with c.cursor() as cursor: + with connection.cursor() as cursor: for statement in statements.split(';'): - c.execute(statement) + if statement.strip() != '': + cursor.execute(statement.strip()) + print("postprocess done") if __name__ == '__main__': run() + postprocess() #pass