-- downloads data async into the http response table.
SELECT
  cron.schedule(
    'download-bus-data',
    '5 seconds',
    $$
      select gather_bus_data();
    $$
  );
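
-- neither gather_bus_data() nor raw_bus_positions is defined in this
-- snippet. below is a minimal sketch, assuming pg_net and a GraphQL-style
-- endpoint; the URL, query text, and column types are guesses inferred
-- from the trigger further down.
CREATE TABLE raw_bus_positions (
  request_id      bigint PRIMARY KEY,                 -- id returned by net.http_post
  created         timestamptz NOT NULL DEFAULT now(), -- when we sent the request
  response_json   jsonb,                              -- filled in by the trigger below
  response_status int
);

CREATE FUNCTION gather_bus_data() RETURNS void LANGUAGE sql AS $$
  -- net.http_post returns the async request id; storing it lets the
  -- trigger below match the response back to this row.
  INSERT INTO raw_bus_positions (request_id)
  VALUES (net.http_post(
    url  := 'https://example.com/graphql',             -- placeholder endpoint
    body := jsonb_build_object(
      'query', '{ BusLocationByRoute { lastUpdate } }' -- placeholder query
    )
  ));
$$;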

-- copies data into a more permanent table/more useful format. note: in
-- supabase, you cannot delete triggers on net._http_response directly,
-- but a cascading delete of the trigger function also removes the
-- trigger itself.
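-- e.g., as a teardown step, dropping the function with CASCADE also drops
-- the dependent trigger defined below:
--   DROP FUNCTION copy_to_raw_table() CASCADE;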
CREATE FUNCTION copy_to_raw_table() RETURNS trigger LANGUAGE plpgsql AS $$
BEGIN
  -- NOTE: the http_response sequence resets on DB restart, so there
  -- is potential for old responses to have duplicated ids. we use a
  -- constraint to only update newer entries (within the last 30
  -- seconds). this should make accidentally overwriting older data
  -- with newer values difficult. The API also provides a
  -- lastUpdate value. we require that raw.created is at or after
  -- that value for the row to be updated.
  UPDATE raw_bus_positions raw
  SET response_json   = NEW.content::jsonb->'data'->'BusLocationByRoute',
      response_status = NEW.status_code
  WHERE raw.request_id = NEW.id
    -- fairly generous constraint to account for long requests.
    AND raw.created >= (NEW.created - '30 seconds'::interval)
    -- per the NOTE above: the data's lastUpdate must not be newer than
    -- the time we sent the request.
    AND raw.created >= (NEW.content::jsonb->'data'->'BusLocationByRoute'->>'lastUpdate')::timestamptz
    AND NEW.status_code = 200
    AND raw.response_json IS NULL;

  -- what if we want to stream other data? we can do multiple updates into
  -- different tables, matched on the request id.
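  -- e.g. (hypothetical second table and JSON path, shown only to
  -- illustrate the pattern):
  --   UPDATE raw_train_positions raw
  --   SET response_json = NEW.content::jsonb->'data'->'TrainLocationByRoute'
  --   WHERE raw.request_id = NEW.id
  --     AND raw.response_json IS NULL;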
  RETURN NULL; -- this is an AFTER trigger
END;
$$;

CREATE CONSTRAINT TRIGGER copy_http_response
  AFTER INSERT ON net._http_response
  DEFERRABLE INITIALLY DEFERRED
  FOR EACH ROW
  EXECUTE FUNCTION copy_to_raw_table();
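
-- quick sanity checks once the job is running. cron.job_run_details ships
-- with pg_cron; the raw_bus_positions columns are the ones assumed above.
SELECT status, return_message, start_time
FROM cron.job_run_details
ORDER BY start_time DESC
LIMIT 5;

SELECT request_id, created, response_status
FROM raw_bus_positions
ORDER BY created DESC
LIMIT 5;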