[server]: Add permanent_data function
This commit is contained in:
142
server/src/functions/permanent_data/permanent_data.py
Normal file
142
server/src/functions/permanent_data/permanent_data.py
Normal file
@ -0,0 +1,142 @@
|
|||||||
|
import json
|
||||||
|
import csv
|
||||||
|
import xmltodict
|
||||||
|
import requests
|
||||||
|
import zipfile
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import boto3
|
||||||
|
|
||||||
|
# DynamoDB handle created at module import time so it is reused across
# warm Lambda invocations instead of being rebuilt per request
dynamodb = boto3.resource("dynamodb")

# API URLs
# Base URL for the Irish Rail realtime API; endpoint paths are appended per call
irishrail_url = "http://api.irishrail.ie/realtime/realtime.asmx/"
|
||||||
|
|
||||||
|
# function to fetch Irish Rail station data
def fetch_train_stations():
    """Fetch all Irish Rail stations and shape them for DynamoDB upload.

    Queries the Irish Rail realtime API once per station type and flattens
    each XML response into flat dict records.

    Returns:
        list[dict]: one record per station, keyed by a composite
        "objectID" of the form "IrishRailStation-<StationCode>".

    Raises:
        requests.HTTPError: if the API responds with an error status.
    """
    api_function = "getAllStationsXML_WithStationType?StationType="
    # station type codes accepted by the API -- presumably mainline,
    # suburban and DART; TODO confirm against the Irish Rail API docs
    station_types = ["M", "S", "D"]
    stations = []

    for station_type in station_types:
        # explicit timeout so a dead endpoint fails fast instead of
        # hanging the Lambda until its own timeout
        response = requests.get(
            irishrail_url + api_function + station_type, timeout=30
        )
        response.raise_for_status()
        # xmltodict.parse already yields plain dict-like access; the
        # original json.dumps/json.loads round-trip was redundant
        stations_xml = xmltodict.parse(response.text)

        for station in stations_xml["ArrayOfObjStation"]["objStation"]:
            stations.append({
                "objectID": "IrishRailStation-" + station["StationCode"],
                "objectType": "IrishRailStation",
                "latitude": station["StationLatitude"],
                "longitude": station["StationLongitude"],

                "trainStationID": station["StationId"],
                "trainStationCode": station["StationCode"],
                "trainStationAlias": station["StationAlias"],
                "trainStationDesc": station["StationDesc"],
                "trainStationType": station_type
            })

    return stations
|
||||||
|
|
||||||
|
|
||||||
|
# function to fetch Luas stops data
def fetch_luas():
    """Fetch all Luas (Dublin tram) stops and shape them for DynamoDB upload.

    Downloads the TII tab-separated stop-locations file and maps each row
    to a flat dict record.

    Returns:
        list[dict]: one record per stop, keyed by a composite "objectID"
        of the form "LuasStop-<Abbreviation>".

    Raises:
        requests.HTTPError: if the download responds with an error status.
    """
    # explicit timeout so a dead endpoint fails fast; utf-8-sig strips
    # the byte-order mark the file is served with
    response = requests.get(
        "https://data.tii.ie/Datasets/Luas/StopLocations/luas-stops.txt",
        timeout=30,
    )
    response.raise_for_status()
    stops_tsv = response.content.decode('utf-8-sig')

    tsv_reader = csv.DictReader(stops_tsv.splitlines(), delimiter="\t")

    stops = []
    # iterate the reader directly -- no need to materialize an
    # intermediate list of rows first
    for stop in tsv_reader:
        stops.append({
            "objectID": "LuasStop-" + stop["Abbreviation"],
            "objectType": "LuasStop",
            "latitude": stop["Latitude"],
            "longitude": stop["Longitude"],

            "luasStopName": stop["Name"],
            "luasStopIrishName": stop["IrishName"],
            "luasStopID": stop["StopID"],
            "luasStopCode": stop["Abbreviation"],
            "luasStopLineID": stop["LineID"],
            "luasStopSortOrder": stop["SortOrder"],
            "luasStopIsEnabled": stop["IsEnabled"],
            "luasStopIsParkAndRide": stop["IsParkAndRide"],
            "luasStopIsCycleAndRide": stop["IsCycleAndRide"],
            "luasStopZoneCountA": stop["ZoneCountA"],
            "luasStopZoneCountB": stop["ZoneCountB"],
        })

    return stops
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_gtfs():
    """Fetch bus routes and stops from the TFI GTFS bundle.

    Downloads the all-operators GTFS zip in memory and extracts
    ``routes.txt`` and ``stops.txt``, mapping each row to a flat dict
    record. Files missing from the archive are skipped silently.

    Returns:
        list[dict]: "BusRoute-*" records followed by "BusStop-*" records.

    Raises:
        requests.HTTPError: if the download responds with an error status.
    """
    data = []
    url = "https://www.transportforireland.ie/transitData/Data/GTFS_All.zip"
    # generous timeout -- the bundle is a large download -- but still
    # bounded so the Lambda cannot hang indefinitely
    response = requests.get(url, timeout=120)
    response.raise_for_status()

    # `archive` instead of `zip` to avoid shadowing the builtin
    with zipfile.ZipFile(io.BytesIO(response.content)) as archive:
        # extract routes data
        if "routes.txt" in archive.namelist():
            with archive.open("routes.txt") as file:
                routes_csv = file.read().decode('utf-8')

            for route in csv.DictReader(routes_csv.splitlines(), delimiter=","):
                data.append({
                    "objectID": "BusRoute-" + route["route_id"],
                    "objectType": "BusRoute",
                    # no latitude or longitude

                    "busRouteAgencyID": route["agency_id"],
                    "busRouteShortName": route["route_short_name"],
                    "busRouteLongName": route["route_long_name"]
                })

        # extract stops data
        if "stops.txt" in archive.namelist():
            with archive.open("stops.txt") as file:
                stops_csv = file.read().decode('utf-8')

            for stop in csv.DictReader(stops_csv.splitlines(), delimiter=","):
                data.append({
                    "objectID": "BusStop-" + stop["stop_id"],
                    "objectType": "BusStop",
                    "latitude": stop["stop_lat"],
                    "longitude": stop["stop_lon"],

                    "busStopID": stop["stop_id"],
                    "busStopCode": stop["stop_code"],
                    "busStopName": stop["stop_name"]
                })

    return data
|
||||||
|
|
||||||
|
|
||||||
|
def lambda_handler(event, context):
    """AWS Lambda entry point: fetch all permanent transport data and
    batch-write it to DynamoDB.

    Args:
        event: Lambda event payload (unused).
        context: Lambda runtime context (unused).

    Returns:
        dict: ``{"statusCode": 200, "body": ...}`` on success, or
        ``{"statusCode": 500, "error": ...}`` if the upload fails.
    """
    print("Lambda Handler invoked! Retrieving data...")
    data = fetch_train_stations() + fetch_luas() + fetch_gtfs()
    print("Data retrieved successfully")

    # table name is configurable via environment for dev/prod stacks
    table_name = os.environ.get("DYNAMODB_TABLE", "permanent_data")
    table = dynamodb.Table(table_name)

    print("Attempting to batch upload retrieved data")

    try:
        # batch_writer buffers puts and retries unprocessed items
        with table.batch_writer() as batch:
            for record in data:
                batch.put_item(Item=record)

        print("done uploading")

        return {
            'statusCode': 200,
            'body': json.dumps({'message': 'Data inserted successfully!'})
        }

    except Exception as e:
        # log before returning so the failure shows up in CloudWatch
        # instead of being silently swallowed into the function result
        print(f"Batch upload failed: {e}")
        return {"statusCode": 500, "error": str(e)}
|
3
server/src/functions/permanent_data/requirements.txt
Normal file
3
server/src/functions/permanent_data/requirements.txt
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
xmltodict
|
||||||
|
requests
|
||||||
|
boto3
|
11
server/src/functions/permanent_data/template.yaml
Normal file
11
server/src/functions/permanent_data/template.yaml
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
AWSTemplateFormatVersion: '2010-09-09'
|
||||||
|
Transform: AWS::Serverless-2016-10-31
|
||||||
|
Resources:
|
||||||
|
MyLambdaFunction:
|
||||||
|
Type: AWS::Serverless::Function
|
||||||
|
Properties:
|
||||||
|
Handler: permanent_data.lambda_handler
|
||||||
|
Runtime: python3.13  # must match the Python version the handler is developed and tested against
|
||||||
|
CodeUri: .
|
||||||
|
MemorySize: 1000
|
||||||
|
Timeout: 600
|
Reference in New Issue
Block a user