From f4fa3e7d3aea4d2641d74cebe8180d5259239842 Mon Sep 17 00:00:00 2001 From: warrior25 Date: Wed, 23 Aug 2023 01:42:36 +0300 Subject: [PATCH] Code simplification and improvements --- custom_components/Nysse/fetch_api.py | 14 +- custom_components/Nysse/network.py | 26 +- custom_components/Nysse/nysse_data.py | 42 ++-- custom_components/Nysse/sensor.py | 333 +++++++++++++------------- 4 files changed, 203 insertions(+), 212 deletions(-) diff --git a/custom_components/Nysse/fetch_api.py b/custom_components/Nysse/fetch_api.py index 045a004..50faadd 100644 --- a/custom_components/Nysse/fetch_api.py +++ b/custom_components/Nysse/fetch_api.py @@ -1,4 +1,4 @@ -from .network import request +from .network import get from .const import NYSSE_STOP_POINTS_URL, NYSSE_LINES_URL import logging import json @@ -13,7 +13,7 @@ async def fetch_stop_points(has_id): else: stations = [] try: - result = await request(NYSSE_STOP_POINTS_URL) + result = await get(NYSSE_STOP_POINTS_URL) if not result: _LOGGER.error("Could not fetch stop points") return @@ -33,8 +33,8 @@ async def fetch_stop_points(has_id): stations = sorted(stations, key=lambda item: item["label"]) return stations - except OSError: - _LOGGER.error("Unknown exception. Check your internet connection") + except OSError as err: + _LOGGER.error("Failed to fetch stops: %s", err) return @@ -43,7 +43,7 @@ async def fetch_lines(stop): lines = [] try: lines_url = NYSSE_LINES_URL.format(stop) - result = await request(lines_url) + result = await get(lines_url) if not result: _LOGGER.error("Could not fetch lines points") return @@ -52,6 +52,6 @@ async def fetch_lines(stop): lines.append(line["name"]) return lines - except OSError: - _LOGGER.error("Unknown exception. 
Check your internet connection") + except OSError as err: + _LOGGER.error("Failed to fetch lines: %s", err) return diff --git a/custom_components/Nysse/network.py b/custom_components/Nysse/network.py index 477b55d..2cea4db 100644 --- a/custom_components/Nysse/network.py +++ b/custom_components/Nysse/network.py @@ -1,18 +1,20 @@ +import logging import aiohttp -import async_timeout +REQUEST_TIMEOUT = 30 +_LOGGER = logging.getLogger(__name__) -async def fetch(session, url): - try: - with async_timeout.timeout(15): + +async def get(url): + timeout = aiohttp.ClientTimeout(total=REQUEST_TIMEOUT) + async with aiohttp.ClientSession(timeout=timeout) as session: + try: async with session.get( url, headers={"Accept": "application/json"} ) as response: - return await response.text() - except: - pass - - -async def request(url): - async with aiohttp.ClientSession() as session: - return await fetch(session, url) + if response.status == 200: + return await response.text() + _LOGGER.error("GET %s: %s", url, response.status) + return + except aiohttp.ClientConnectorError as err: + _LOGGER.error("Connection error: %s", err) diff --git a/custom_components/Nysse/nysse_data.py b/custom_components/Nysse/nysse_data.py index 0024a1c..86bd147 100644 --- a/custom_components/Nysse/nysse_data.py +++ b/custom_components/Nysse/nysse_data.py @@ -19,12 +19,20 @@ def __init__(self): self._station_id = "" self._stops = [] - def populate(self, departures, journeys, station_id, stops, max_items): + def populate( + self, + departures, + journeys, + station_id, + stops, + max_items, + update_time, + ): """Collect sensor data to corresponding variables.""" departures2 = [] self._station_id = station_id self._stops = stops - self._last_update = datetime.now().astimezone(LOCAL_TZ) + self._last_update = update_time if self._station_id in departures["body"]: departures2 = departures["body"][self._station_id] @@ -33,25 +41,17 @@ def populate(self, departures, journeys, station_id, stops, max_items): 
self._json_data = departures2[:max_items] # Append static timetable data if not enough realtime data - weekday_int = datetime.today().weekday() i = 0 - while len(self._json_data) < max_items: - if len(journeys[weekday_int]) <= i: - i = 0 - if weekday_int < 6: - weekday_int += 1 - else: - weekday_int = 0 - if weekday_int == datetime.today().weekday(): - _LOGGER.warning( - "%s: Not enough timetable data was found. Try decreasing the number of requested departures", - station_id, - ) - break - else: - self._json_data.append(journeys[weekday_int][i]) + if i < len(journeys): + self._json_data.append(journeys[i]) i += 1 + else: + _LOGGER.warning( + "%s: Not enough timetable data was found. Try decreasing the number of requested departures", + station_id, + ) + break def get_state(self): """Get next departure time as the sensor state.""" @@ -68,7 +68,7 @@ def get_departures(self): "destination": self.get_destination_name(item), "line": item["lineRef"], "departure": self.get_departure_time(item, True), - "time_to_station": self.time_to_station(item), + "time_to_station": self.time_to_station(item, self._last_update), "icon": self.get_line_icon(item["lineRef"]), "realtime": self.is_realtime(item), } @@ -128,11 +128,11 @@ def get_destination_name(self, entry): return self._stops[entry["destinationShortName"]] return "unavailable" - def time_to_station(self, entry, seconds=False): + def time_to_station(self, entry, current_time, seconds=False): """Get time until departure in minutes""" time = self.get_departure_time(entry, False) if time != "unavailable": - next_departure_time = (time - datetime.now().astimezone(LOCAL_TZ)).seconds + next_departure_time = (time - current_time).seconds if seconds: return next_departure_time diff --git a/custom_components/Nysse/sensor.py b/custom_components/Nysse/sensor.py index 1be9196..b4e71be 100644 --- a/custom_components/Nysse/sensor.py +++ b/custom_components/Nysse/sensor.py @@ -1,17 +1,16 @@ """Platform for sensor integration.""" from 
__future__ import annotations -from dateutil import parser - import logging import json -import pytz +from itertools import cycle from datetime import timedelta, datetime -import time +import pytz +from dateutil import parser from homeassistant import config_entries, core from homeassistant.components.sensor import SensorEntity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .network import request +from .network import get from .nysse_data import NysseData from .fetch_api import fetch_stop_points from .const import ( @@ -26,6 +25,7 @@ _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=30) +PAGE_SIZE = 100 LOCAL_TZ = pytz.timezone("Europe/Helsinki") @@ -88,51 +88,94 @@ def __init__(self, name, station, maximum, timelimit, lines): self._nysse_data = NysseData() self._departures = [] self._stops = [] - self._journeys = {} + self._journeys = [] self._live_data = [] self._current_weekday_int = -1 + self._last_update_time = None + + async def fetch_stops(self): + if len(self._stops) == 0: + _LOGGER.debug("Fetching stops") + self._stops = await fetch_stop_points(False) + if len(self._stops) == 0: + _LOGGER.error("Failed to fetch stops") + + def strip_journey_data(self, journeys, weekday_int): + if weekday_int == self._current_weekday_int: + delta = timedelta(seconds=0) + elif weekday_int > self._current_weekday_int: + delta = timedelta(days=weekday_int - self._current_weekday_int) + else: + delta = timedelta(days=7 - self._current_weekday_int + weekday_int) + + journeys_data = [] + for journey in journeys["body"]: + for stop_point in journey["calls"]: + if stop_point["stopPoint"]["shortName"] == self.station_no: + formatted_journey = self.format_journey(journey, stop_point, delta) + json_dump = json.dumps(formatted_journey) + journeys_data.append(json.loads(json_dump)) + return journeys_data + + def format_journey(self, journey, stop_point, delta): + line_ref = journey["lineUrl"].split("/")[7] + destination_short_name = 
journey["calls"][-1]["stopPoint"]["shortName"] + expected_arrival_time = ( + (self._last_update_time + delta).strftime("%Y-%m-%dT") + + stop_point["arrivalTime"] + + self._last_update_time.strftime("%z")[:3] + + ":" + + self._last_update_time.strftime("%z")[3:] + ) + + formatted_data = { + "lineRef": line_ref, + "destinationShortName": destination_short_name, + "non-realtime": True, + "call": {"expectedArrivalTime": expected_arrival_time}, + } + return formatted_data def remove_stale_data(self): removed_journey_count = 0 - journeys_to_remove = [] - departures_to_remove = [] - - for weekday in range(0, 7): - for journey in self._journeys[weekday]: - if parser.parse( - journey["call"]["expectedArrivalTime"] - ) < datetime.now().astimezone(LOCAL_TZ) + timedelta( - minutes=self.timelimit - ) or ( - journey["lineRef"] not in self.lines - ): - journeys_to_remove.append(journey) - for journey1 in journeys_to_remove: + removed_departures = [] + + # Remove stale journeys based on time and lineRef + for journey in self._journeys[:]: + arrival_time = parser.parse(journey["call"]["expectedArrivalTime"]) + if arrival_time < self._last_update_time + timedelta( + minutes=self.timelimit + ) or (journey["lineRef"] not in self.lines): + self._journeys.remove(journey) + removed_journey_count += 1 + + # Remove stale departures based on time and lineRef + if len(self._live_data) > 0 and self.station_no in self._live_data["body"]: + for item in self._live_data["body"][self.station_no][:]: + time_to_station = self._nysse_data.time_to_station( + item, self._last_update_time, True + ) + if (time_to_station < (self.timelimit * 60)) or item[ + "lineRef" + ] not in self.lines: + removed_departures.append(item) + self._live_data["body"][self.station_no].remove(item) + + # Remove corresponding journeys for removed departures + for item in removed_departures: + departure_time = self._nysse_data.get_departure_time( + item, False, "aimedArrival" + ) + matching_journeys = [ + journey + for journey 
in self._journeys + if parser.parse(journey["call"]["expectedArrivalTime"]) + == departure_time + ] + for journey in matching_journeys: + self._journeys.remove(journey) removed_journey_count += 1 - self._journeys[weekday].remove(journey1) - journeys_to_remove.clear() - - if self.station_no in self._live_data["body"]: - for item in self._live_data["body"][self.station_no]: - for journey in self._journeys[self._current_weekday_int]: - if ( - parser.parse(journey["call"]["expectedArrivalTime"]) - == self._nysse_data.get_departure_time( - item, False, "aimedArrival" - ) - and journey not in journeys_to_remove - ): - journeys_to_remove.append(journey) - - if ( - self._nysse_data.time_to_station(item, True) < (self.timelimit * 60) - ) or item["lineRef"] not in self.lines: - departures_to_remove.append(item) - - for journey1 in journeys_to_remove: - removed_journey_count += 1 - self._journeys[self._current_weekday_int].remove(journey1) if removed_journey_count > 0: _LOGGER.debug( @@ -141,160 +184,106 @@ def remove_stale_data(self): removed_journey_count, ) - if len(departures_to_remove) > 0: + if len(removed_departures) > 0: _LOGGER.debug( - "%s: Removing %s stale or unwanted departures", + "%s: Removed %s stale or unwanted departures", self.station_no, - len(departures_to_remove), + len(removed_departures), ) - for item in departures_to_remove: - self._live_data["body"][self.station_no].remove(item) - - async def fetch_stops(self): - if len(self._stops) == 0: - _LOGGER.debug("Fectching stops") - self._stops = await fetch_stop_points(False) - def modify_journey_data(self, journeys, weekday_int): - journeys_data = [] + async def fetch_live_data(self): + departure_url = NYSSE_STOP_URL.format(self.station_no) + _LOGGER.debug( + "%s: Fetching departures from %s", self.station_no, departure_url + ) + live_data = await get(departure_url) + if not live_data: + _LOGGER.warning( + "%s: Can't fetch departures. 
Incorrect response from %s", + self.station_no, + departure_url, + ) + return json.loads(live_data) - if weekday_int == self._current_weekday_int: - delta = timedelta(seconds=0) - elif weekday_int > self._current_weekday_int: - delta = timedelta(days=(weekday_int - self._current_weekday_int)) - else: - delta = timedelta(days=(7 - self._current_weekday_int + weekday_int)) + async def fetch_journeys(self): + fetched_journeys = [] - for journey in journeys["body"]: - for stop_point in journey["calls"]: - if stop_point["stopPoint"]["shortName"] == self.station_no: - data_set = {} - data_set["lineRef"] = journey["lineUrl"].split("/")[7] - data_set["destinationShortName"] = journey["calls"][ - len(journey["calls"]) - 1 - ]["stopPoint"]["shortName"] - data_set["non-realtime"] = True - data_set2 = {} - - data_set2["expectedArrivalTime"] = ( - (datetime.now().astimezone(LOCAL_TZ) + delta).strftime( - "%Y-%m-%d" - ) - + "T" - + stop_point["arrivalTime"] - + datetime.now(LOCAL_TZ).strftime("%z")[:3] - + ":" - + datetime.now(LOCAL_TZ).strftime("%z")[3:] + async def fetch_data_for_weekday(weekday_index): + journeys_index = 0 + weekday_string = WEEKDAYS[weekday_index] + while True: + journeys_url = NYSSE_JOURNEYS_URL.format( + self.station_no, weekday_string, journeys_index + ) + _LOGGER.debug( + "%s: Fetching timetable data from %s", + self.station_no, + journeys_url, + ) + journeys_data = await get(journeys_url) + if not journeys_data: + _LOGGER.error( + "%s: Can't fetch timetables. 
Incorrect response from %s", + self.station_no, + journeys_url, ) + return - data_set["call"] = data_set2 + journeys_data_json = json.loads(journeys_data) + modified_journey_data = self.strip_journey_data( + journeys_data_json, weekday_index + ) - json_dump = json.dumps(data_set) + for journey in modified_journey_data: + fetched_journeys.append(journey) - journeys_data.append(json.loads(json_dump)) + if journeys_data_json["data"]["headers"]["paging"]["moreData"]: + journeys_index += PAGE_SIZE + else: + break - return journeys_data + for i in range(self._current_weekday_int, self._current_weekday_int + 7): + await fetch_data_for_weekday(i % 7) + + return fetched_journeys async def async_update(self): """Fetch new state data for the sensor. This is the only method that should fetch new data for Home Assistant. """ - - await self.fetch_stops() - self._current_weekday_int = datetime.today().weekday() - - departure_url = NYSSE_STOP_URL.format(self.station_no) + self._last_update_time = datetime.now().astimezone(LOCAL_TZ) + self._current_weekday_int = self._last_update_time.weekday() try: - _LOGGER.debug( - "%s: Fectching departures from %s", self.station_no, departure_url - ) - self._live_data = await request(departure_url) + await self.fetch_stops() + self.remove_stale_data() - if not self._live_data: - _LOGGER.warning( - "%s: Can't fetch departures. Incorrect response from %s", - self.station_no, - departure_url, - ) - self._live_data = [] - else: - self._live_data = json.loads(self._live_data) - - total_journeys_left = 0 - if len(self._journeys) == 7: - for i in range(7): - total_journeys_left += len(self._journeys[i]) + self._live_data = await self.fetch_live_data() - if total_journeys_left < self.max_items: + if len(self._journeys) < self.max_items: _LOGGER.debug( "%s: Not enough timetable data remaining. 
Trying to fetch new data", self.station_no, ) - self._journeys.clear() - - for weekday in WEEKDAYS: - journeys_index = 0 - weekday_int = time.strptime(weekday, "%A").tm_wday - - while True: - journeys_url = NYSSE_JOURNEYS_URL.format( - self.station_no, weekday, journeys_index - ) - - _LOGGER.debug( - "%s: Fetching timetable data from %s", - self.station_no, - journeys_url, - ) - journeys_data = await request(journeys_url) - - if not journeys_data: - _LOGGER.error( - "%s: Can't fetch timetables. Incorrect response from %s", - self.station_no, - journeys_url, - ) - return - - journeys_data_json = json.loads(journeys_data) - - modified_journey_data = self.modify_journey_data( - journeys_data_json, weekday_int - ) - - if not weekday_int in self._journeys: - self._journeys[weekday_int] = [] - - for journey in modified_journey_data: - self._journeys[weekday_int].append(journey) - - if journeys_data_json["data"]["headers"]["paging"]["moreData"]: - journeys_index += 100 - else: - break - - except OSError: - _LOGGER.error( - "%s: Unknown exception. Check your internet connection", self.station_no - ) - return - - self.remove_stale_data() - - _LOGGER.debug( - "%s: Data fetching complete. 
Populating sensor with data", self.station_no - ) - self._nysse_data.populate( - self._live_data, - self._journeys, - self.station_no, - self._stops, - self.max_items, - ) + self._journeys = await self.fetch_journeys() + self.remove_stale_data() + _LOGGER.debug( + "%s: Got %s valid journeys", self.station_no, len(self._journeys) + ) - self._state = self._nysse_data.get_state() - self._departures = self._nysse_data.get_departures() + _LOGGER.debug("%s: Data fetching complete", self.station_no) + self._nysse_data.populate( + self._live_data, + self._journeys, + self.station_no, + self._stops, + self.max_items, + self._last_update_time, + ) + self._state = self._nysse_data.get_state() + self._departures = self._nysse_data.get_departures() + except OSError as err: + _LOGGER.error("%s: Failed to update sensor: %s", self.station_no, err) @property def unique_id(self):