Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 14 additions & 11 deletions pybikes/bicicard.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,11 @@
import json

from pybikes import BikeShareSystem, BikeShareStation, PyBikesScraper
from pybikes.contrib import TSTCache


AUTH_URL = '{endpoint}/api/certificado'
STATIONS_URL = '{endpoint}/apiapp/SBancada/Estado/TodosSimple'

cache = TSTCache(delta=3600)


def stupidict(thing):
""" makes all keys on a dict lowercase. Useful when different endpoints
Expand Down Expand Up @@ -45,18 +43,23 @@ def auth_url(self):
def stations_url(self):
    # Full "all stations, simple status" feed URL for this system's endpoint.
    return STATIONS_URL.format(endpoint=self.endpoint)

def authorize(self, scraper):
    """Obtain the endpoint certificate and attach its thumbprint header.

    Requests the certificate feed (cached for an hour), lowercases its
    keys, and sets the 'Thumbprint' header on the scraper so subsequent
    requests are authorized. Returns the same scraper instance.
    """
    raw = scraper.request(self.auth_url, cache_for=3600)
    cert = stupidict(json.loads(raw))
    scraper.headers['Thumbprint'] = cert['thumbprint']
    return scraper


def update(self, scraper=None):
headers = {
scraper = scraper or PyBikesScraper()
scraper.headers.update({
'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 11; ROCINANTE FIRE Build/9001',
'DeviceModel': 'ROCINANTE FIRE',
}
scraper = scraper or PyBikesScraper(cache)
cert = json.loads(scraper.request(self.auth_url, headers=headers))
cert = stupidict(cert)
headers.update({
'Thumbprint': cert['thumbprint'],
})
data = scraper.request(self.stations_url, headers=headers, skip_cache=True)
self.authorize(scraper)
data = scraper.request(self.stations_url)
info = map(stupidict, json.loads(data))
self.stations = list(map(BicicardStation, info))

Expand Down
24 changes: 6 additions & 18 deletions pybikes/bicimad.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,12 @@
import json

from pybikes import BikeShareSystem, BikeShareStation, PyBikesScraper
from pybikes.contrib import TSTCache


COLORS = ['green', 'red', 'yellow', 'gray']
AUTH_URL = 'https://openapi.emtmadrid.es/v2/mobilitylabs/user/login/'
FEED_URL = 'https://openapi.emtmadrid.es/v2/transport/bicimad/stations/'

cache = TSTCache(delta=3600)


class Bicimad(BikeShareSystem):
authed = True
Expand All @@ -30,31 +27,22 @@ def __init__(self, tag, meta, key):
super(Bicimad, self).__init__(tag, meta)
self.key = key

@staticmethod
def authorize(scraper, key):
request = scraper.request

def authorize(self, scraper, key):
headers = {
'passkey': key['passkey'],
'x-clientid': key['clientid'],
}
accesstoken_content = scraper.request(AUTH_URL, headers=headers)
accesstoken_content = scraper.request(AUTH_URL, headers=headers, cache_for=3600)
accesstoken = json.loads(accesstoken_content)['data'][0]['accessToken']

def _request(*args, **kwargs):
headers = kwargs.get('headers', {})
headers.update({'accesstoken': accesstoken})
kwargs['headers'] = headers
return request(*args, **kwargs)

scraper.request = _request
scraper.headers.update({'accesstoken': accesstoken})

def update(self, scraper=None):
scraper = scraper or PyBikesScraper(cache)
scraper = scraper or PyBikesScraper()

Bicimad.authorize(scraper, self.key)
self.authorize(scraper, self.key)

scraper_content = scraper.request(FEED_URL, skip_cache=True)
scraper_content = scraper.request(FEED_URL)

data = json.loads(scraper_content)

Expand Down
146 changes: 141 additions & 5 deletions pybikes/contrib.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,15 @@
import re
import time
import inspect

try:
# Python 2
from itertools import imap as map
except ImportError:
# Python 3
pass

from pybikes import BikeShareSystem, BikeShareStation


class TSTCache(dict):
Expand All @@ -23,10 +34,8 @@ def __setitem__(self, key, value):
key = self.__transform_key__(key)
if not self.__test_key__(key):
return
self.store[key] = {
'value': value,
'ts': time.time()
}

self.store[key] = self.__transform_value__(key, value)

def __getitem__(self, key):
key = self.__transform_key__(key)
Expand All @@ -35,8 +44,11 @@ def __getitem__(self, key):
if key not in self.store:
raise KeyError('%s' % key)
ts_value = self.store[key]
if time.time() - ts_value['ts'] > self.delta:
the_time = time.time()

if the_time - ts_value['ts'] > self.__get_delta__(key, ts_value):
raise KeyError('%s' % key)

return ts_value['value']

def __contains__(self, key):
Expand All @@ -53,8 +65,132 @@ def __iter__(self):
def __len__(self):
return len(self.store)

def get(self, key, default=None):
    """dict.get analogue: return self[key] if the key is considered
    present by __contains__, else *default*."""
    if key in self:
        return self[key]
    return default

def items(self):
    """Generator of (key, value) pairs for entries still considered
    present by __contains__ (i.e. not expired — TODO confirm against
    __contains__, defined elsewhere in this class)."""
    # Iterate keys directly: the values from store.items() were unused.
    return ((k, self[k]) for k in self.store if k in self)

def keys(self):
    """Generator over keys whose entries __contains__ still accepts."""
    return (key for key in self.store if key in self)

def __test_key__(self, key):
    # Hook for subclasses to veto caching of a key; the base accepts all.
    return True

def __transform_key__(self, key):
    # Hook for subclasses to normalize keys before storage; base is identity.
    return key

def __transform_value__(self, key, value):
return {
'value': value,
'ts': time.time(),
}

def __get_delta__(self, key, entry):
    # Expiry window for an entry; base class uses one global delta.
    # Subclasses (see PBCache) may derive it per entry.
    return self.delta

def __repr__(self):
return self.store.__repr__()

def flush(self):
    """Physically delete entries that __contains__ no longer accepts.

    Expired entries are otherwise only skipped on read; this reclaims
    the memory they occupy in the backing store.
    """
    # Materialize the key list first: we mutate self.store while scanning.
    for key in list(self.store):
        if key not in self:
            del self.store[key]


class PBCache(TSTCache):
    """ PBCache stands for PyBikes Cache

    It's the same as the TSTCache, but annotates entries with callstack
    information based on being called from a bike share system

    Gets initialized with a list of defined deltas per regex rule. Said
    delta will be applied to entries based on its annotation.
    """

    def __init__(self, * args, ** kwargs):
        # 'deltas' is an ordered list of single-entry dicts mapping an
        # annotation regex to a delta in seconds; see __match_delta__.
        self.deltas = kwargs.pop('deltas', [])
        super(PBCache, self).__init__(* args, ** kwargs)

    def __get_annotation__(self, key):
        """ introspect call stack to find a bike share system """

        def get_frame(entry):
            """ python 2 and 3 compatible frame getter """
            # Python 2 inspect.stack() yields plain tuples; Python 3
            # yields FrameInfo objects with a .frame attribute.
            if isinstance(entry, tuple):
                return entry[0]
            else:
                return entry.frame

        def get_function(finfo):
            """ python 2 and 3 compatible function getter """
            # Tuple index 3 is the function name on Python 2.
            if isinstance(finfo, tuple):
                return finfo[3]
            else:
                return finfo.function

        valid_types = (BikeShareSystem, )
        stack = inspect.stack()
        # Pair each frame's local 'self' (if any) with its frame info,
        # then keep only frames whose 'self' is a BikeShareSystem.
        selfs = map(lambda f: (get_frame(f).f_locals.get('self'), f), stack)
        bss = filter(lambda f: isinstance(f[0], valid_types), selfs)

        # Nearest bike share system on the stack, or (None, None).
        some_bikeshare, frame_info = next(iter(bss), (None, None))

        # no bike share found on call stack, bail
        if not some_bikeshare:
            return None

        # create an annotation based on bike share found
        # ie: 'gbfs::citi-bike-nyc::update::https://some-url'
        annotation = '{cls}::{tag}::{method}::{key}'.format(
            cls=some_bikeshare.__class__.__name__.lower(),
            tag=some_bikeshare.tag,
            method=get_function(frame_info),
            key=key,
        )

        return annotation

    def __match_delta__(self, key):
        """Resolve (delta, annotation) for a key: first regex rule in
        self.deltas that matches the annotation wins; falls back to the
        class-wide self.delta. Returns (None, None) when no bike share
        system is found on the call stack."""
        annotation = self.__get_annotation__(key)

        if not annotation:
            return None, None

        # get a delta value based on annotation
        # list of deltas are a list of dicts like
        # - it's a list because it keeps order
        # - it's made of dicts because it's a safe json structure
        # [
        #     {'gbfs::.*::update': 100},
        #     {'gbfs::some-tag::update::some-url': 200},
        # ]

        # iterate items on delta list
        deltas = map(lambda e: e.items(), self.deltas)
        # flatten iterator
        deltas = (e for it in deltas for e in it)
        apply_rules = filter(lambda r: re.match(r[0], annotation), deltas)
        _, delta = next(iter(apply_rules), (None, self.delta))

        return delta, annotation

    def __transform_value__(self, key, value):
        """Build a cache entry carrying its own expiry delta and the
        call-stack annotation it was resolved from."""
        delta, annotation = self.__match_delta__(key)
        return {
            'value': value,
            'ts': time.time(),
            'delta': delta,
            'annotation': annotation,
        }

    def __get_delta__(self, key, entry):
        """Per-entry expiry: the entry's own delta when set, else the
        class-wide default."""
        delta = entry.get('delta')
        # guards against a delta = 0 triggering return of self.delta
        return delta if delta is not None else self.delta

    def set_with_delta(self, key, value, delta):
        """Store *value* under *key* with an explicit expiry *delta*,
        overriding whatever __match_delta__ would resolve."""
        entry = self.__transform_value__(key, value)
        entry['delta'] = delta
        self.store[key] = entry
6 changes: 1 addition & 5 deletions pybikes/deutschebahn.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

from pybikes import PyBikesScraper
from pybikes.gbfs import Gbfs
from pybikes.contrib import TSTCache


FEED_URL = 'https://apis.deutschebahn.com/db-api-marketplace/apis/shared-mobility-gbfs/2-2/de/{provider}/gbfs'
Expand Down Expand Up @@ -52,12 +51,9 @@ class Callabike(DB):

provider = 'CallABike'

# caches the feed for 60s
cache = TSTCache(delta=60)

def __init__(self, * args, ** kwargs):
    # Pin the provider to 'CallABike'; everything else passes through to DB.
    super(Callabike, self).__init__(* args, provider=Callabike.provider, ** kwargs)

def update(self, scraper=None):
scraper = scraper or PyBikesScraper(self.cache)
scraper = scraper or PyBikesScraper()
super(Callabike, self).update(scraper)
28 changes: 20 additions & 8 deletions pybikes/gbfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,22 @@ def get_feeds(self, url, scraper, force_https):

return {feed['name']: feed['url'] for feed in feeds}

# We use these dumb functions to mark requests for caching with a
# 'gbfs::tag::get_station_information::url' signature
def get_station_information(self, scraper, feeds):
    """Fetch and decode the GBFS station_information feed.

    Kept as a named method so cache layers can key the request on a
    'gbfs::tag::get_station_information::url' signature.
    """
    payload = scraper.request(feeds['station_information'])
    return json.loads(payload)['data']['stations']

def get_station_status(self, scraper, feeds):
    """Fetch and decode the GBFS station_status feed.

    Kept as a named method so cache layers can key the request on a
    'gbfs::tag::get_station_status::url' signature.
    """
    payload = scraper.request(feeds['station_status'])
    return json.loads(payload)['data']['stations']

def get_vehicle_types(self, scraper, feeds):
    """Fetch and decode the GBFS vehicle_types feed.

    Returns the list of vehicle type dicts, or [] when the feed's
    'data' object lacks a 'vehicle_types' key — restores the guard the
    pre-refactor caller had via .get('vehicle_types', []).
    """
    payload = scraper.request(feeds['vehicle_types'])
    return json.loads(payload)['data'].get('vehicle_types', [])

def update(self, scraper=None):
scraper = scraper or PyBikesScraper()
Expand All @@ -122,21 +138,17 @@ def update(self, scraper=None):
feeds = self.get_feeds(self.feed_url, scraper, self.force_https)

# Station Information and Station Status data retrieval
station_information = json.loads(
scraper.request(feeds['station_information'])
)['data']['stations']
station_status = json.loads(
scraper.request(feeds['station_status'])
)['data']['stations']
station_information = self.get_station_information(scraper, feeds)
station_status = self.get_station_status(scraper, feeds)

if 'vehicle_types' in feeds:
vehicle_info = json.loads(scraper.request(feeds['vehicle_types']))
vehicle_info = self.get_vehicle_types(scraper, feeds)
# map vehicle id to vehicle info AND extra info resolver
# for direct access
vehicles = {
# TODO: ungrok this line
v.get('vehicle_type_id', 'err'): (v, next(iter((r for q, r in self.vehicle_taxonomy if q(v))), lambda v: {}))
for v in vehicle_info['data'].get('vehicle_types', [])
for v in vehicle_info
}
else:
vehicles = {}
Expand Down
8 changes: 1 addition & 7 deletions pybikes/nextbike.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,10 @@

from .base import BikeShareSystem, BikeShareStation
from pybikes.utils import PyBikesScraper, filter_bounds
from pybikes.contrib import TSTCache

__all__ = ['Nextbike', 'NextbikeStation']

BASE_URL = 'https://{hostname}/maps/nextbike-live.xml?domains={domain}' # NOQA

# Since most networks share the same hostname, there's no need to keep hitting
# the endpoint on the same urls. This caches the feed for 60s
cache = TSTCache(delta=60)


class Nextbike(BikeShareSystem):
sync = True
Expand All @@ -38,7 +32,7 @@ def __init__(self, tag, meta, domain, city_uid, hostname='maps.nextbike.net',

def update(self, scraper=None):
if scraper is None:
scraper = PyBikesScraper(cache)
scraper = PyBikesScraper()
domain_xml = etree.fromstring(
scraper.request(self.url).encode('utf-8')
)
Expand Down
8 changes: 1 addition & 7 deletions pybikes/publibike.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,9 @@
import json

from pybikes import BikeShareSystem, BikeShareStation, PyBikesScraper
from pybikes.contrib import TSTCache

FEED_URL = 'https://api.publibike.ch/v1/public/partner/stations'

# caches the feed for 60s
cache = TSTCache(delta=60)


class Publibike(BikeShareSystem):
sync = True
Expand All @@ -29,9 +25,7 @@ def __init__(self, tag, meta, city_uid):
self.uid = city_uid

def update(self, scraper=None):
if scraper is None:
# use cached feed if possible
scraper = PyBikesScraper(cache)
scraper = scraper or PyBikesScraper()

stations = json.loads(
scraper.request(FEED_URL).encode('utf-8')
Expand Down
Loading