17 changes: 8 additions & 9 deletions .env
@@ -1,9 +1,8 @@
#Express server port, uncomment and add yours, otherwise it will default to 3000 in server.js
#PORT=3000s
#PYTHON_URL=http://localhost:5000

#Load path for dataset here
PROCESSED_DATA_PATH=./mock_data/processed_data.json

# Path to raw CSV dataset
RAW_CSV_PATH=./mock_data/2881821.csv
PORT=3000
PGHOST=127.0.0.1
PGPORT=5432
PGUSER=jakebh
PGDATABASE=iot_dev
PGPASSWORD=
PROCESSED_DATA_PATH=BackendCode/mock_data/processed_data.json
RAW_CSV_PATH=BackendCode/mock_data/complex_formatted.csv
8 changes: 8 additions & 0 deletions .env.example
@@ -0,0 +1,8 @@
PORT=3000
PGHOST=127.0.0.1
PGPORT=5432
PGUSER=your_db_user
PGDATABASE=iot_dev
PGPASSWORD=
PROCESSED_DATA_PATH=BackendCode/mock_data/processed_data.json
RAW_CSV_PATH=BackendCode/mock_data/complex_formatted.csv
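For reference, a minimal sketch of how these variables might be consumed at startup. The entrypoint name (server.js) is taken from the old .env comment; the snippet itself is not part of this PR:

// Hypothetical startup snippet (not in this diff) — dotenv must populate
// process.env before the pool module reads the PG* variables.
require('dotenv').config();
const pool = require('./BackendCode/db/pool'); // reads PGHOST, PGPORT, PGUSER, ...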
3 changes: 2 additions & 1 deletion .gitignore
@@ -1 +1,2 @@
node_modules/
node_modules/
.env
93 changes: 38 additions & 55 deletions BackendCode/controllers/mockController.js
@@ -1,55 +1,38 @@
//handles HTTP request logic for mock data routes

const {
  readProcessedData,
  getAvailableStreamNames,
  filterEntriesByStreamNames
} = require('../services/mockService');

//GET /streams — Returns the stream data as JSON
const getStreams = (req, res) => {
  try {
    const data = readProcessedData();
    res.json(data);
  } catch (err) {
    console.error('Error reading stream data:', err);
    res.status(500).json({ error: 'Failed to load stream data' });
  }
};

//GET /stream-names — Returns an array of available stream names
const getStreamNames = (req, res) => {
  try {
    const streamNames = getAvailableStreamNames();
    if (streamNames.length === 0) {
      return res.status(404).json({ error: "No stream names found" });
    }
    res.json(streamNames);
  } catch (err) {
    console.error('Error getting stream names:', err);
    res.status(500).json({ error: 'Failed to get stream names' });
  }
};

//POST /filter-streams — Returns entries filtered by stream names (without a time window)
const postFilterStreams = (req, res) => {
  const { streamNames } = req.body;

  if (!Array.isArray(streamNames) || streamNames.length === 0) {
    return res.status(400).json({ error: 'streamNames must be a non-empty array' });
  }

  try {
    const filtered = filterEntriesByStreamNames(streamNames);
    res.json(filtered);
  } catch (err) {
    console.error('Error filtering stream data:', err);
    res.status(500).json({ error: 'Failed to filter stream data' });
  }
};

module.exports = {
  getStreams,
  getStreamNames,
  postFilterStreams
};
const {
  getStreams: repoGetStreams,
  getStreamNames: repoGetStreamNames,
  getFilteredStreams
} = require('../repositories/pgRepository');

// GET /streams — returns readings from Postgres, with optional paging and a `since` cutoff
async function getStreams(req, res) {
  try {
    const limit = Number(req.query.limit) || 8000;
    const offset = Number(req.query.offset) || 0;
    const since = req.query.since || undefined;
    const data = await repoGetStreams({ limit, offset, since });
    res.json(data);
  } catch (e) {
    console.error('Error loading streams:', e);
    res.status(500).json({ error: 'Failed to load streams' });
  }
}

// GET /stream-names — returns the list of available stream names
async function getStreamNames(_req, res) {
  try {
    const names = await repoGetStreamNames();
    res.json(names);
  } catch (e) {
    console.error('Error loading stream names:', e);
    res.status(500).json({ error: 'Failed to load stream names' });
  }
}

// POST /filter-streams — projects readings down to the requested stream names
async function postFilterStreams(req, res) {
  try {
    const { streamNames } = req.body || {};
    if (!Array.isArray(streamNames) || streamNames.length === 0) {
      return res.status(400).json({ error: 'streamNames must be a non-empty array' });
    }
    const data = await getFilteredStreams(streamNames);
    res.json(data);
  } catch (e) {
    console.error('Error filtering streams:', e);
    res.status(500).json({ error: 'Failed to filter streams' });
  }
}

module.exports = { getStreams, getStreamNames, postFilterStreams };
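The routes file is not included in this diff; a hypothetical wiring, using the paths named in the old controller comments (GET /streams, GET /stream-names, POST /filter-streams), might look like:

const express = require('express');
const {
  getStreams,
  getStreamNames,
  postFilterStreams
} = require('./controllers/mockController');

const router = express.Router();
router.get('/streams', getStreams);
router.get('/stream-names', getStreamNames);
router.post('/filter-streams', postFilterStreams);

module.exports = router;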
13 changes: 13 additions & 0 deletions BackendCode/db/pool.js
@@ -0,0 +1,13 @@
const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.PGHOST,
  port: process.env.PGPORT,
  user: process.env.PGUSER || undefined,
  password: process.env.PGPASSWORD || undefined,
  database: process.env.PGDATABASE,
  max: 10,
  idleTimeoutMillis: 30000,
});

module.exports = pool;
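A quick way to verify the pool connects (a standalone sketch, not part of this PR; the relative path is an assumption):

// smoke-test.js — hypothetical connectivity check
require('dotenv').config();
const pool = require('./BackendCode/db/pool');

async function main() {
  const { rows } = await pool.query('SELECT NOW() AS now');
  console.log('Connected, server time:', rows[0].now);
  await pool.end();
}

main().catch((err) => { console.error(err); process.exit(1); });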
2 changes: 1 addition & 1 deletion BackendCode/repositories/mockRepository.js
@@ -1,6 +1,6 @@
//handles data access for mock data, reading from local JSON file without a database yet

require('dotenv').config({ path: '../.env' });
require('dotenv').config({ path: require('path').resolve(__dirname, '../../.env') });

const fs = require('fs');
const path = require('path');
61 changes: 61 additions & 0 deletions BackendCode/repositories/pgRepository.js
@@ -0,0 +1,61 @@
const pool = require('../db/pool');

const FIELD_MAP = [
  { db: 'temperature', api: 'Temperature' },
  { db: 'rh_humidity', api: 'RH Humidity' },
  { db: 'usable_light_index', api: 'Usable Light Index' },
  { db: 'atmosphere_hpa', api: 'Atmosphere hPa' },
  { db: 'voltage_charge', api: 'Voltage Charge' },
];

function toApiRow(r) {
  const out = {
    created_at: r.created_at,
    entry_id: Number(r.entry_id),
    was_interpolated: r.was_interpolated,
  };
  for (const f of FIELD_MAP) {
    const v = r[f.db];
    out[f.api] = (v === null || v === undefined) ? null : Number(v);
  }
  return out;
}

async function getStreams({ limit = 8000, offset = 0, since } = {}) {
  const params = [];
  let where = '';
  if (since) {
    params.push(since);
    where = `WHERE created_at >= $${params.length}`;
  }
  params.push(limit, offset);

  const sql = `
    SELECT created_at, entry_id, temperature, rh_humidity,
           usable_light_index, atmosphere_hpa, voltage_charge, was_interpolated
    FROM readings
    ${where}
    ORDER BY created_at ASC
    LIMIT $${params.length - 1} OFFSET $${params.length};
  `;

  const { rows } = await pool.query(sql, params);
  return rows.map(toApiRow);
}

async function getStreamNames() {
  return FIELD_MAP.map(f => f.api);
}

async function getFilteredStreams(streamNames = []) {
  // Pull rows then project requested fields (keeps existing API shape)
  const all = await getStreams({});
  if (!streamNames || streamNames.length === 0) return all;
  return all.map(r => {
    const out = { created_at: r.created_at, entry_id: r.entry_id };
    for (const name of streamNames) out[name] = r[name] ?? null;
    return out;
  });
}

module.exports = { getStreams, getStreamNames, getFilteredStreams };
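Hypothetical usage of the repository (the timestamp and stream names below are illustrative, not from this PR):

const { getStreams, getFilteredStreams } = require('./BackendCode/repositories/pgRepository');

(async () => {
  // First 100 readings at or after an arbitrary cutoff
  const recent = await getStreams({ limit: 100, offset: 0, since: '2024-01-01T00:00:00Z' });
  // Same rows projected down to two of the five streams
  const slim = await getFilteredStreams(['Temperature', 'RH Humidity']);
  console.log(recent.length, slim[0]);
})();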
2 changes: 1 addition & 1 deletion BackendCode/repositories/pythonRepository.js
@@ -1,6 +1,6 @@
// handles data access for Python service, sending CSV and parameters via FormData

require('dotenv').config({ path: '../.env' });
require('dotenv').config({ path: require('path').resolve(__dirname, '../../.env') });

const fs = require('fs');
const path = require('path');
1 change: 1 addition & 0 deletions baseline/stream_names.json
@@ -0,0 +1 @@
["Temperature","RH Humidity","Usable Light Index","Atmosphere hPa","Voltage Charge"]
1 change: 1 addition & 0 deletions baseline/streams.json

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions migrations/001_init.sql
@@ -0,0 +1,11 @@
CREATE TABLE IF NOT EXISTS readings (
  id BIGSERIAL PRIMARY KEY,
  created_at TIMESTAMPTZ NOT NULL,
  entry_id BIGINT UNIQUE NOT NULL,
  temperature NUMERIC,
  rh_humidity NUMERIC,
  usable_light_index NUMERIC,
  atmosphere_hpa NUMERIC,
  voltage_charge NUMERIC,
  was_interpolated BOOLEAN DEFAULT FALSE
);
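Since no migration runner is included in the PR, one minimal way to apply this file from Node (a sketch, reusing the pool module) could be:

// migrate.js — hypothetical one-shot runner for migrations/001_init.sql
require('dotenv').config();
const fs = require('fs');
const pool = require('./BackendCode/db/pool');

async function migrate() {
  const sql = fs.readFileSync('migrations/001_init.sql', 'utf8');
  await pool.query(sql);
  console.log('Migration applied');
  await pool.end();
}

migrate().catch((err) => { console.error(err); process.exit(1); });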