diff --git a/data_science/datasets/1321079.csv b/datasets/1321079-sensor2.csv
similarity index 100%
rename from data_science/datasets/1321079.csv
rename to datasets/1321079-sensor2.csv
diff --git a/datasets/2881821.csv b/datasets/2881821-sensor1.csv
similarity index 100%
rename from datasets/2881821.csv
rename to datasets/2881821-sensor1.csv
diff --git a/data_science/datasets/518150.csv b/datasets/518150-sensor3.csv
similarity index 100%
rename from data_science/datasets/518150.csv
rename to datasets/518150-sensor3.csv
diff --git a/new-frontend/frontend/src/components/Chart.jsx b/new-frontend/frontend/src/components/Chart.jsx
index 5bcde13..0761439 100644
--- a/new-frontend/frontend/src/components/Chart.jsx
+++ b/new-frontend/frontend/src/components/Chart.jsx
@@ -1,25 +1,51 @@
import React from 'react';
-import { LineChart, Line, XAxis, YAxis, Tooltip, Legend, CartesianGrid} from 'recharts';
-// multiple charts with different colours
+import { LineChart, Line, XAxis, YAxis, Tooltip, Legend, CartesianGrid } from 'recharts';
+
const colors = ['#8884d8', '#82ca9d', '#ff7300', '#ff0000', '#00c49f', '#0088fe'];
-const Chart = ({ data, selectedStreams }) => (
-
-
-
-
-
-
- {selectedStreams.map((stream, i) => (
-
- ))}
-
-);
+// dot renderer that colors by per-point quality flag if present
+const makeCustomDot = (dataKey) => (props) => {
+ const { cx, cy, payload } = props;
+ if (cx == null || cy == null || !payload) return null;
+ const flag = payload[`${dataKey}_quality`];
+ const fill = flag === false ? '#ff0000' : '#00a35a'; // red for anomaly, green otherwise
+ return ;
+};
+
+export default function Chart({ data, selectedStreams }) {
+ if (!Array.isArray(data) || data.length === 0) return null;
-export default Chart;
+ const showDots = true;
+
+ return (
+
+
+ new Date(v).toLocaleString()}
+ scale="time"
+ />
+
+ new Date(v).toLocaleString()}
+ formatter={(value, name, props) => {
+ const q = props?.payload?.[`${name}_quality`];
+ return [value, q === false ? `${name} (anomaly)` : `${name} (normal)`];
+ }}
+ />
+
+ {selectedStreams.map((stream, i) => (
+
+ ))}
+
+ );
+}
diff --git a/new-frontend/frontend/src/components/Dashboard.jsx b/new-frontend/frontend/src/components/Dashboard.jsx
old mode 100755
new mode 100644
index 9d2e676..339abd8
--- a/new-frontend/frontend/src/components/Dashboard.jsx
+++ b/new-frontend/frontend/src/components/Dashboard.jsx
@@ -1,8 +1,8 @@
-import React, { useState } from 'react';
-import { useSensorData } from '../hooks/useSensorData.js';
-import { useFilteredData } from '../hooks/useFilteredData.js';
-import { useStreamNames } from '../hooks/useStreamNames.js';
-import { useTimeRange } from '../hooks/useTimeRange.js';
+import React, { useEffect, useMemo, useState } from 'react';
+import { useSensorData } from '../hooks/useSensorData.js'; // fallback/mock only
+import { useFilteredData } from '../hooks/useFilteredData.js'; // fallback/mock only
+import { useStreamNames } from '../hooks/useStreamNames.js'; // fallback/mock only
+import { useTimeRange } from '../hooks/useTimeRange.js'; // fallback/mock only
import TimeSelector from './TimeSelector.jsx';
import StreamSelector from './StreamSelector.jsx';
import IntervalSelector from './IntervalSelector.jsx';
@@ -10,182 +10,271 @@ import StreamStats from './StreamStats.jsx';
import './Dashboard.css';
import Chart from './Chart.jsx';
import MostCorrelatedPair from './MostCorrelatedPair.jsx';
-import ScatterPlot from './ScatterPlot.jsx';
+// helpers
+const getDatasetIdFromPath = () => {
+ const segs = window.location.pathname.replace(/\/+$/, '').split('/');
+ return segs[segs.length - 1] || '';
+};
+const toISO = (t) => (t instanceof Date ? t.toISOString() : new Date(t).toISOString());
+// Merge { streamA:[{ts,value,quality_flag}], ... } -> [{ts, timestamp, streamA, streamA_quality, ...}]
+function mergeSeriesToWide(seriesMap) {
+ if (!seriesMap) return null;
+ const bucket = new Map();
+ Object.entries(seriesMap).forEach(([stream, points]) => {
+ points.forEach(({ ts, value, quality_flag }) => {
+ const d = new Date(ts);
+ const key = d.toISOString();
+ const row = bucket.get(key) || {
+ ts: d,
+ timestamp: d.getTime(), // numeric ms for Chart X axis
+ };
+ row[stream] = value;
+ row[`${stream}_quality`] = quality_flag; // boolean (raw interval)
+ bucket.set(key, row);
+ });
+ });
-const Dashboard = () => {
- const { data, loading, error } = useSensorData(true); // mock mode
- const streamNames = useStreamNames(data);
- const [startTime, endTime] = useTimeRange(data);
- const timeOptions = useTimeRange(data);
- const [selectedTimeStart, setSelectedTimeStart] = useState('');
- const [selectedTimeEnd, setSelectedTimeEnd] = useState('');
- //const correlation = useCorrelationMatrix(data, streamNames, startTime, endTime);
- const [selectedStreams, setSelectedStreams] = useState([]);
+ return Array.from(bucket.values()).sort((a, b) => a.ts - b.ts);
+}
- const intervals = ['5min', '15min', '1h', '6h'];
+export default function Dashboard() {
+ // mock fallback (unchanged)
+ const { data: mockData, loading: mockLoading, error: mockError } = useSensorData(true);
+ const mockStreamNames = useStreamNames(mockData);
+ const [mockStart, mockEnd] = useTimeRange(mockData);
+ const mockTimeOptions = useTimeRange(mockData);
+ // selections
+ const [selectedTimeStart, setSelectedTimeStart] = useState('');
+ const [selectedTimeEnd, setSelectedTimeEnd] = useState('');
+ const [selectedStreams, setSelectedStreams] = useState([]); // MULTI
+ const intervals = ['raw', '5min', '15min', '1h', '6h'];
const [selectedInterval, setSelectedInterval] = useState(intervals[0]);
-
-
-
- const filteredData = useFilteredData(data, {
+ // local filtering (fallback only)
+ const filteredData = useFilteredData(mockData, {
startTime: selectedTimeStart,
endTime: selectedTimeEnd,
selectedStreams,
- interval: selectedInterval
+ interval: selectedInterval,
});
- const streamCount = selectedStreams.length;
+ const datasetId = getDatasetIdFromPath();
- const handleSubmit = () => {
- console.log('Selected Time Range:', selectedTimeStart, '→', selectedTimeEnd);
+ // meta + timestamps
+ const [metaLoading, setMetaLoading] = useState(false);
+ const [metaError, setMetaError] = useState(null);
+ const [meta, setMeta] = useState(null);
+ const [timestampOptions, setTimestampOptions] = useState([]);
-
- console.log('selectedInterval:', selectedInterval);
- // You can filter data, send to backend, or trigger chart updates
+ // fetched series
+ const [apiLoading, setApiLoading] = useState(false);
+ const [apiError, setApiError] = useState(null);
+ const [serverSeriesMap, setServerSeriesMap] = useState(null);
+ const serverChartData = useMemo(() => mergeSeriesToWide(serverSeriesMap), [serverSeriesMap]);
- console.log('Filtered Data:', filteredData);
- console.log('Selected Pair:', selectedPair);
-
-};
+ // prefer server merged data if it exists, otherwise mock-filtered
+ const displayData =
+ serverChartData && serverChartData.length > 0 ? serverChartData : filteredData;
- if (loading) return
Loading dataset...
;
- if (error) return
-
-
-
Note:
-
- Select at least one stream to view the line chart.
- Select two streams to see their scatter plot with a trendline, their correlation coefficient, and a rolling correlation line plot in the time interval using the selected time-window.
- Select at least three streams and a time range, to see which two streams are the most correlated in the selected time range, their scatter plot with a trendline.
-
- If no scatter plot is shown, it means there is not enough variance in the data during the selected time range.
- If no rolling correlation line is shown, it means there is not enough variance in the data during the selected time range.
- If no meaningful scatter plot is available for the most correlated pair, it means one or both streams lack variance in the selected time range.
- If no time range is selected, the entire dataset is used.
-
-
-
Total Data Points in Dataset: {data.length} |
-
- Data Points in Selected Range: {filteredData.length}
-
-
-
-
Streams: {streamNames.map(s => s.name).join(', ')}
-
+ setMeta(json);
+ if (!selectedTimeStart && json?.timeBounds?.start)
+ setSelectedTimeStart(json.timeBounds.start);
+ if (!selectedTimeEnd && json?.timeBounds?.end)
+ setSelectedTimeEnd(json.timeBounds.end);
+ if (selectedStreams.length === 0 && Array.isArray(json.fields) && json.fields.length > 0) {
+ setSelectedStreams([json.fields[0]]); // default to first stream
+ }
+ } catch (e) {
+ if (!cancelled) setMetaError(e.message);
+ } finally {
+ if (!cancelled) setMetaLoading(false);
+ }
+ })();
+ return () => {
+ cancelled = true;
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [datasetId]);
+ // load timestamp options (optional)
+ useEffect(() => {
+ let cancelled = false;
+ (async function loadTimestamps() {
+ if (!datasetId) return;
+ try {
+ const res = await fetch(
+ `/api/timestamps?datasetId=${encodeURIComponent(datasetId)}&limit=2000`
+ );
+ if (!res.ok) return;
+ const { timestamps } = await res.json();
+ if (cancelled) return;
+ const opts = Array.isArray(timestamps) ? timestamps : [];
+ setTimestampOptions(opts);
+ if (!selectedTimeStart && opts.length) setSelectedTimeStart(opts[0]);
+ if (!selectedTimeEnd && opts.length) setSelectedTimeEnd(opts[opts.length - 1]);
+ } catch {
+ // silent fallback to mock options
+ }
+ })();
+ return () => {
+ cancelled = true;
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [datasetId]);
-
-
+ // fetch series for multiple streams (parallel)
+ const handleSubmit = async () => {
+ setApiError(null);
+ setApiLoading(true);
+ setServerSeriesMap(null);
-
-
-
-
+ try {
+ if (!datasetId) throw new Error('Missing datasetId from URL');
+ if (!selectedStreams?.length) throw new Error('Please select at least one stream');
+ if (!selectedTimeStart || !selectedTimeEnd)
+ throw new Error('Please select start and end time');
+
+ const fromISO = toISO(selectedTimeStart);
+ const toISO_ = toISO(selectedTimeEnd);
+ const interval = selectedInterval || 'raw';
+
+ const tasks = selectedStreams.map(async (stream) => {
+ const url =
+ `/api/series?datasetId=${encodeURIComponent(datasetId)}` +
+ `&stream=${encodeURIComponent(stream)}` +
+ `&interval=${encodeURIComponent(interval)}` +
+ `&from=${encodeURIComponent(fromISO)}&to=${encodeURIComponent(toISO_)}`;
+
+ const res = await fetch(url);
+ if (!res.ok) {
+ const err = await res.json().catch(() => ({}));
+ throw new Error(err.message || `Series request failed (${res.status}) for ${stream}`);
+ }
+ const json = await res.json(); // { series: [{ ts, value, quality_flag? }, ...] }
+
+ console.log(`Stream "${stream}": ${json.series?.length || 0} rows`);
+ console.log(`Data for "${stream}":`, json.series);
+
+ return [stream, json.series || []];
+ });
+
+ const entries = await Promise.all(tasks);
+ const map = Object.fromEntries(entries);
+
+ setServerSeriesMap(map);
+
+ const merged = mergeSeriesToWide(map);
+ console.log('Merged chart data:', merged);
+ } catch (e) {
+ console.error(e);
+ setApiError(e.message);
+ } finally {
+ setApiLoading(false);
+ }
+ };
+
+ const streamListForLabel = meta?.fields?.length
+ ? meta.fields.join(', ')
+ : mockStreamNames.map((s) => s.name).join(', ');
+
+ if (mockLoading && metaLoading) return
Loading dataset...
;
+ if (mockError) return
Error loading local dataset: {String(mockError)}
;
+ if (metaError) return
Error loading metadata: {String(metaError)}
;
+
+ return (
+
+
+ Hello World! I just came alive with this Sensor Data Set with {meta?.fieldCount ?? 7} fields!!
-
-
-
-
-
Time Range Selection
-
-
-
-
+ Streams: {streamListForLabel}
+
+
+
+
-
-
+
- {/* this button for future use */}
-
- Analyse Time Range
-
-
-
-
-{/* add some space here */}
-
- {streamCount === 0 && (
-
-
Please select one or more streams to view statistics and charts.
-
- )}
- {streamCount === 1 && (
-
-
Selected one stream to see their scatter plot. Select another stream to explore correlations.
-
-
- )}
- {streamCount === 2 && (
-
-
Selected two streams to see their scatter plot and rolling correlation. Select one more stream to see the most correlated pair among the selected streams.
- {/*
Note: If no scatter plot is shown, it means there is not enough data to display it.
*/}
-
-
-
-
-
- )}
-
- {streamCount > 2 && (
-
-
Selected {streamCount} streams.
-
-
Note: If no scatter plot is shown, it means there is not enough variance in the data during the selected time range.
-
-
- )}
-
-
-
-
- {selectedStreams.map(stream => (
-
- ))}
-
-
-
-
+
+
Time Range Selection
+
+
+
+
+
+
+
+
+
+ {apiLoading ? 'Loading…' : 'Analyse Time Range'}
+
+ {apiError &&
{apiError}
}
+ {serverSeriesMap && !apiError && !apiLoading && (
+
Loaded server data for {Object.keys(serverSeriesMap).length} stream(s).
+ )}
+
+
+
+
-
-
-
-
-
-
+
+
+ {selectedStreams.map((stream) => (
+
+ ))}
+ {selectedStreams.length > 2 && (
+
+ )}
+
+
-
+
+
+
+
+
);
-};
-
-export default Dashboard;
+}
diff --git a/new-frontend/frontend/src/components/StreamSelector.jsx b/new-frontend/frontend/src/components/StreamSelector.jsx
index 4aa1a9e..6409f0c 100755
--- a/new-frontend/frontend/src/components/StreamSelector.jsx
+++ b/new-frontend/frontend/src/components/StreamSelector.jsx
@@ -1,38 +1,44 @@
-// components/StreamSelector.jsx
-import { useStreamNames } from '../hooks/useStreamNames';
+import React from 'react';
-const StreamSelector = ({ data, selectedStreams, setSelectedStreams }) => {
- const streamNames = useStreamNames(data);
+export default function StreamSelector({
+ streams = [], // e.g. ["field1","field2","field3", ...]
+ selectedStreams = [], // e.g. ["field1","field3"]
+ setSelectedStreams,
+ label = 'Select Streams'
+}) {
+ const onChange = (e) => {
+ const selected = Array.from(e.target.selectedOptions, opt => opt.value);
+ setSelectedStreams(selected);
+ console.log('Selected streams:', selected);
+ };
- if (!streamNames || streamNames.length === 0) {
- return
No streams available
;
+ if (!streams || streams.length === 0) {
+ return (
+
+
{label}
+
No streams available
+
+ );
}
-
return (
-
- Select Stream:
-
- {
- const selected = Array.from(e.target.selectedOptions, option => option.value);
- setSelectedStreams(selected);
- console.log('Selected streams:', selected);
- }}
- >
- {streamNames.map(stream => (
-
- {stream.name}
-
- ))}
-
-
-
+
{label}
+
+ {streams.map(name => (
+ {name}
+ ))}
+
+
+ Hold Ctrl (Windows/Linux) or Cmd (Mac) to select multiple.
+
);
-};
-
-export default StreamSelector;
+}
diff --git a/new-frontend/frontend/vite.config.js b/new-frontend/frontend/vite.config.js
index 8b0f57b..a472772 100644
--- a/new-frontend/frontend/vite.config.js
+++ b/new-frontend/frontend/vite.config.js
@@ -4,4 +4,10 @@ import react from '@vitejs/plugin-react'
// https://vite.dev/config/
export default defineConfig({
plugins: [react()],
+ server: {
+ proxy: {
+ '/api': { target: 'http://localhost:3000', changeOrigin: true },
+ '/health': { target: 'http://localhost:3000', changeOrigin: true }
+ }
+ }
})
diff --git a/newBackend/.env b/newBackend/.env
index 2edd30e..e7b6141 100644
--- a/newBackend/.env
+++ b/newBackend/.env
@@ -2,4 +2,11 @@
#PORT=3000s
#Load path for dataset here
-PROCESSED_DATA_PATH=./mock_data/processed_data.json
\ No newline at end of file
+#PROCESSED_DATA_PATH=./mock_data/processed_data.json
+
+PORT=3000
+PGHOST=localhost
+PGPORT=5432
+PGDATABASE=appdb
+PGUSER=postgres
+PGPASSWORD=typescript.postgre
diff --git a/newBackend/.gitignore b/newBackend/.gitignore
index 40b878d..3ec544c 100644
--- a/newBackend/.gitignore
+++ b/newBackend/.gitignore
@@ -1 +1,2 @@
-node_modules/
\ No newline at end of file
+node_modules/
+.env
\ No newline at end of file
diff --git a/newBackend/BackendCode/server.js b/newBackend/BackendCode/server.js
deleted file mode 100644
index b27fc78..0000000
--- a/newBackend/BackendCode/server.js
+++ /dev/null
@@ -1,25 +0,0 @@
-//handles server setup and configuration for the Express backend
-
-require('dotenv').config({ path: '../.env' }); //Load .env from root
-
-const express = require('express');
-const cors = require('cors');
-
-const mockRoutes = require('./routes/mock');
-
-const app = express();
-app.use(cors());
-app.use(express.json());
-
-app.get('/', (req, res) => {
- res.send('Backend is running');
-});
-
-//Mount mock routes
-app.use('/api', mockRoutes);
-
-//Start server
-const PORT = process.env.PORT || 3000;
-app.listen(PORT, () => {
- console.log(`Server running on http://localhost:${PORT}`);
-});
\ No newline at end of file
diff --git a/newBackend/README.md b/newBackend/README.md
index 49505b7..1589df4 100644
--- a/newBackend/README.md
+++ b/newBackend/README.md
@@ -1 +1,125 @@
-# BackendPrototype
\ No newline at end of file
+# BackendPrototype
+
+# Databytes – Intelligent IoT Data Management (MVP)
+
+This MVP lets you:
+
+- upload time-series CSVs into PostgreSQL,
+- fetch series via an Express API,
+- visualise multiple streams in React with anomaly flags.
+
+# 1) Prerequisites
+
+- Node.js 18+ and npm
+- PostgreSQL 14+ (server), psql (CLI), and optionally pgAdmin
+- A terminal (PowerShell on Windows, Terminal on macOS/Linux)
+
+# 2) Repo layout (relevant parts)
+
+- /frontend # Vite + React app
+- /newBackend # Express API
+- /sql/schema.sql # DB schema (datasets, timeseries_long)
+- /datasets # (local CSVs for ingest; gitignored)
+- /newBackend/mappings  # (optional mapping JSONs per dataset)
+
+# 3) Database setup (PostgreSQL)
+
+Create the appdb database, then apply the schema (adjust user/password as needed).
+
+From the repo root (adjust the path if needed):
+psql -U app -d appdb -h localhost -f sql/schema.sql
+
+Or, if using the 'postgres' superuser instead:
+psql -U postgres -d appdb -h localhost -f sql/schema.sql
+
+Or do it interactively in psql:
+
+postgres=# CREATE DATABASE appdb;
+postgres=# \c appdb
+appdb=# \i C:\Users\Dell\Downloads\schema.sql
+appdb=# \dt
+appdb=# SELECT * FROM timeseries_long LIMIT 10;
+appdb=# SELECT * FROM datasets LIMIT 10;
+
+You should now have tables:
+
+- datasets (id, name)
+
+- timeseries_long (id, dataset_id, ts, metric, value, quality_flag)
+
+# 4) Backend (Express API)
+
+cd newBackend
+npm install
+
+Edit newBackend/.env to match your DB:
+PGHOST=localhost
+PGPORT=5432
+PGDATABASE=appdb
+PGUSER=app # or postgres
+PGPASSWORD=app # or your postgres password
+PORT=3000
+
+Start the API:
+npm run dev    # watch mode via "node --watch" (see package.json)
+or
+node server.js
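+
+On a successful start you should see the DB config echoed ("DB cfg: ...") followed by:
+Server running on http://localhost:3000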
+
+Key endpoints
+
+GET /api/datasets
+→ list available datasets (e.g., sensor1, sensor2)
+
+GET /api/datasets/:id/meta
+→ fields + time bounds for a dataset
+
+GET /api/series?datasetId=...&stream=...&interval=raw&from=...&to=...
+→ time series rows. For interval=raw, each item includes:
+{ "ts": "...", "value": 12.34, "quality_flag": true }
+
+# 5) Frontend (React + Vite)
+cd frontend
+npm install
+npm run dev
+
+# 6) Anomalies (quality flag)
+
+quality_flag is a boolean:
+
+- true = normal (green dot in the chart)
+- false = anomaly (red dot in the chart)
+
+The frontend colors dots based on the *_quality fields returned by the API.
+
+Data Science can update flags later without any API changes, for example:
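+
+A minimal sketch (assuming quality_flag is the boolean column in timeseries_long; the WHERE clause is purely illustrative):
+
+UPDATE timeseries_long
+SET quality_flag = false      -- mark these rows as anomalies
+WHERE dataset_id = 1
+  AND metric = 'Temperature'
+  AND value > 80;
+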
+# 7) Typical workflow (quick start)
+
+1. DB: create appdb, run sql/schema.sql.
+2. Backend: set .env, npm install, node server.js.
+3. Ingest: load 1–2 CSVs into datasets/timeseries_long.
+4. Frontend: npm run dev, open a dataset page (e.g., /dashboard/sensor1).
+5. Select streams + interval (raw for flags) + time range → Analyse.
+6. See lines + green/red dots (when flags exist).
+
+# 8) How to upload datasets into the PostgreSQL database
+
+- To upload dataset 1 as sensor 1, please run:
+
+npm run ingest -- --file ../datasets/2881821-sensor1.csv --map mappings/sensor1.json
+
+- To upload dataset 2 as sensor 2, please run:
+
+npm run ingest -- --file ../datasets/1321079-sensor2.csv --map mappings/sensor2.json
+
+- To upload dataset 3 as sensor 3, please run:
+
+npm run ingest -- --file ../datasets/518150-sensor3.csv --map mappings/sensor3.json
+
diff --git a/newBackend/mappings/sensor1.json b/newBackend/mappings/sensor1.json
new file mode 100644
index 0000000..6a69411
--- /dev/null
+++ b/newBackend/mappings/sensor1.json
@@ -0,0 +1,8 @@
+{
+ "datasetName": "sensor1",
+ "ts_column": "created_at",
+ "entity_column": "entry_id",
+ "metrics": "auto",
+ "renames": { }
+}
+
diff --git a/newBackend/mappings/sensor2.json b/newBackend/mappings/sensor2.json
new file mode 100644
index 0000000..ab4a5b3
--- /dev/null
+++ b/newBackend/mappings/sensor2.json
@@ -0,0 +1,8 @@
+{
+ "datasetName": "sensor2",
+ "ts_column": "created_at",
+ "entity_column": "entry_id",
+ "metrics": "auto",
+ "renames": { }
+}
+
diff --git a/newBackend/mappings/sensor3.json b/newBackend/mappings/sensor3.json
new file mode 100644
index 0000000..a06a37b
--- /dev/null
+++ b/newBackend/mappings/sensor3.json
@@ -0,0 +1,8 @@
+{
+ "datasetName": "sensor3",
+ "ts_column": "created_at",
+ "entity_column": "entry_id",
+ "metrics": "auto",
+ "renames": { }
+}
+
diff --git a/newBackend/package-lock.json b/newBackend/package-lock.json
index 76898e7..b1e71c0 100644
--- a/newBackend/package-lock.json
+++ b/newBackend/package-lock.json
@@ -1,17 +1,46 @@
{
- "name": "backendprototype",
+ "name": "newBackend",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
- "name": "backendprototype",
+ "name": "newBackend",
"version": "1.0.0",
- "license": "ISC",
"dependencies": {
"cors": "^2.8.5",
- "dotenv": "^17.2.1",
- "express": "^5.1.0"
+ "csv-parse": "^6.1.0",
+ "dotenv": "^17.2.2",
+ "express": "^5.1.0",
+ "fast-csv": "^5.0.5",
+ "multer": "^2.0.2",
+ "pg": "^8.16.3"
+ }
+ },
+ "node_modules/@fast-csv/format": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/@fast-csv/format/-/format-5.0.5.tgz",
+ "integrity": "sha512-0P9SJXXnqKdmuWlLaTelqbrfdgN37Mvrb369J6eNmqL41IEIZQmV4sNM4GgAK2Dz3aH04J0HKGDMJFkYObThTw==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash.escaperegexp": "^4.1.2",
+ "lodash.isboolean": "^3.0.3",
+ "lodash.isfunction": "^3.0.9",
+ "lodash.isnil": "^4.0.0"
+ }
+ },
+ "node_modules/@fast-csv/parse": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/@fast-csv/parse/-/parse-5.0.5.tgz",
+ "integrity": "sha512-M0IbaXZDbxfOnpVE5Kps/a6FGlILLhtLsvWd9qNH3d2TxNnpbNkFf3KD26OmJX6MHq7PdQAl5htStDwnuwHx6w==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash.escaperegexp": "^4.1.2",
+ "lodash.groupby": "^4.6.0",
+ "lodash.isfunction": "^3.0.9",
+ "lodash.isnil": "^4.0.0",
+ "lodash.isundefined": "^3.0.1",
+ "lodash.uniq": "^4.5.0"
}
},
"node_modules/accepts": {
@@ -27,6 +56,12 @@
"node": ">= 0.6"
}
},
+ "node_modules/append-field": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz",
+ "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==",
+ "license": "MIT"
+ },
"node_modules/body-parser": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz",
@@ -47,6 +82,23 @@
"node": ">=18"
}
},
+ "node_modules/buffer-from": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
+ "license": "MIT"
+ },
+ "node_modules/busboy": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
+ "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==",
+ "dependencies": {
+ "streamsearch": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=10.16.0"
+ }
+ },
"node_modules/bytes": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
@@ -85,6 +137,21 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/concat-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz",
+ "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==",
+ "engines": [
+ "node >= 6.0"
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "buffer-from": "^1.0.0",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.0.2",
+ "typedarray": "^0.0.6"
+ }
+ },
"node_modules/content-disposition": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz",
@@ -137,6 +204,12 @@
"node": ">= 0.10"
}
},
+ "node_modules/csv-parse": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-6.1.0.tgz",
+ "integrity": "sha512-CEE+jwpgLn+MmtCpVcPtiCZpVtB6Z2OKPTr34pycYYoL7sxdOkXDdQ4lRiw6ioC0q6BLqhc6cKweCVvral8yhw==",
+ "license": "MIT"
+ },
"node_modules/debug": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
@@ -164,9 +237,9 @@
}
},
"node_modules/dotenv": {
- "version": "17.2.1",
- "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.1.tgz",
- "integrity": "sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==",
+ "version": "17.2.2",
+ "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.2.tgz",
+ "integrity": "sha512-Sf2LSQP+bOlhKWWyhFsn0UsfdK/kCWRv1iuA2gXAwt3dyNabr6QSj00I2V10pidqz69soatm9ZwZvpQMTIOd5Q==",
"license": "BSD-2-Clause",
"engines": {
"node": ">=12"
@@ -291,6 +364,19 @@
"url": "https://opencollective.com/express"
}
},
+ "node_modules/fast-csv": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/fast-csv/-/fast-csv-5.0.5.tgz",
+ "integrity": "sha512-9//QpogDIPln5Dc8e3Q3vbSSLXlTeU7z1JqsUOXZYOln8EIn/OOO8+NS2c3ukR6oYngDd3+P1HXSkby3kNV9KA==",
+ "license": "MIT",
+ "dependencies": {
+ "@fast-csv/format": "5.0.5",
+ "@fast-csv/parse": "5.0.5"
+ },
+ "engines": {
+ "node": ">=10.0.0"
+ }
+ },
"node_modules/finalhandler": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz",
@@ -466,6 +552,48 @@
"integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==",
"license": "MIT"
},
+ "node_modules/lodash.escaperegexp": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz",
+ "integrity": "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.groupby": {
+ "version": "4.6.0",
+ "resolved": "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz",
+ "integrity": "sha512-5dcWxm23+VAoz+awKmBaiBvzox8+RqMgFhi7UvX9DHZr2HdxHXM/Wrf8cfKpsW37RNrvtPn6hSwNqurSILbmJw==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.isboolean": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
+ "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.isfunction": {
+ "version": "3.0.9",
+ "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz",
+ "integrity": "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.isnil": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/lodash.isnil/-/lodash.isnil-4.0.0.tgz",
+ "integrity": "sha512-up2Mzq3545mwVnMhTDMdfoG1OurpA/s5t88JmQX809eH3C8491iu2sfKhTfhQtKY78oPNhiaHJUpT/dUDAAtng==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.isundefined": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/lodash.isundefined/-/lodash.isundefined-3.0.1.tgz",
+ "integrity": "sha512-MXB1is3s899/cD8jheYYE2V9qTHwKvt+npCwpD+1Sxm3Q3cECXCiYHjeHWXNwr6Q0SOBPrYUDxendrO6goVTEA==",
+ "license": "MIT"
+ },
+ "node_modules/lodash.uniq": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
+ "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==",
+ "license": "MIT"
+ },
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
@@ -517,12 +645,94 @@
"node": ">= 0.6"
}
},
+ "node_modules/minimist": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
+ "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/mkdirp": {
+ "version": "0.5.6",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz",
+ "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==",
+ "license": "MIT",
+ "dependencies": {
+ "minimist": "^1.2.6"
+ },
+ "bin": {
+ "mkdirp": "bin/cmd.js"
+ }
+ },
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
+ "node_modules/multer": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.2.tgz",
+ "integrity": "sha512-u7f2xaZ/UG8oLXHvtF/oWTRvT44p9ecwBBqTwgJVq0+4BW1g8OW01TyMEGWBHbyMOYVHXslaut7qEQ1meATXgw==",
+ "license": "MIT",
+ "dependencies": {
+ "append-field": "^1.0.0",
+ "busboy": "^1.6.0",
+ "concat-stream": "^2.0.0",
+ "mkdirp": "^0.5.6",
+ "object-assign": "^4.1.1",
+ "type-is": "^1.6.18",
+ "xtend": "^4.0.2"
+ },
+ "engines": {
+ "node": ">= 10.16.0"
+ }
+ },
+ "node_modules/multer/node_modules/media-typer": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/multer/node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/multer/node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/multer/node_modules/type-is": {
+ "version": "1.6.18",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
+ "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
+ "license": "MIT",
+ "dependencies": {
+ "media-typer": "0.3.0",
+ "mime-types": "~2.1.24"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/negotiator": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
@@ -592,6 +802,134 @@
"node": ">=16"
}
},
+ "node_modules/pg": {
+ "version": "8.16.3",
+ "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
+ "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
+ "license": "MIT",
+ "dependencies": {
+ "pg-connection-string": "^2.9.1",
+ "pg-pool": "^3.10.1",
+ "pg-protocol": "^1.10.3",
+ "pg-types": "2.2.0",
+ "pgpass": "1.0.5"
+ },
+ "engines": {
+ "node": ">= 16.0.0"
+ },
+ "optionalDependencies": {
+ "pg-cloudflare": "^1.2.7"
+ },
+ "peerDependencies": {
+ "pg-native": ">=3.0.1"
+ },
+ "peerDependenciesMeta": {
+ "pg-native": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/pg-cloudflare": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz",
+ "integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==",
+ "license": "MIT",
+ "optional": true
+ },
+ "node_modules/pg-connection-string": {
+ "version": "2.9.1",
+ "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz",
+ "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==",
+ "license": "MIT"
+ },
+ "node_modules/pg-int8": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
+ "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=4.0.0"
+ }
+ },
+ "node_modules/pg-pool": {
+ "version": "3.10.1",
+ "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz",
+ "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==",
+ "license": "MIT",
+ "peerDependencies": {
+ "pg": ">=8.0"
+ }
+ },
+ "node_modules/pg-protocol": {
+ "version": "1.10.3",
+ "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz",
+ "integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==",
+ "license": "MIT"
+ },
+ "node_modules/pg-types": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
+ "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
+ "license": "MIT",
+ "dependencies": {
+ "pg-int8": "1.0.1",
+ "postgres-array": "~2.0.0",
+ "postgres-bytea": "~1.0.0",
+ "postgres-date": "~1.0.4",
+ "postgres-interval": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/pgpass": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
+ "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
+ "license": "MIT",
+ "dependencies": {
+ "split2": "^4.1.0"
+ }
+ },
+ "node_modules/postgres-array": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
+ "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/postgres-bytea": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
+ "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/postgres-date": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
+ "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/postgres-interval": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
+ "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
+ "license": "MIT",
+ "dependencies": {
+ "xtend": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@@ -644,6 +982,20 @@
"node": ">= 0.8"
}
},
+ "node_modules/readable-stream": {
+ "version": "3.6.2",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
+ "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
+ "license": "MIT",
+ "dependencies": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/router": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
@@ -801,6 +1153,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/split2": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
+ "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">= 10.x"
+ }
+ },
"node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
@@ -810,6 +1171,23 @@
"node": ">= 0.8"
}
},
+ "node_modules/streamsearch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
+ "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==",
+ "engines": {
+ "node": ">=10.0.0"
+ }
+ },
+ "node_modules/string_decoder": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
+ "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
+ "license": "MIT",
+ "dependencies": {
+ "safe-buffer": "~5.2.0"
+ }
+ },
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@@ -833,6 +1211,12 @@
"node": ">= 0.6"
}
},
+ "node_modules/typedarray": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
+ "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==",
+ "license": "MIT"
+ },
"node_modules/unpipe": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
@@ -842,6 +1226,12 @@
"node": ">= 0.8"
}
},
+ "node_modules/util-deprecate": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
+ "license": "MIT"
+ },
"node_modules/vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
@@ -856,6 +1246,15 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
+ },
+ "node_modules/xtend": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
+ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.4"
+ }
}
}
}
diff --git a/newBackend/package.json b/newBackend/package.json
index d59695d..5492e16 100644
--- a/newBackend/package.json
+++ b/newBackend/package.json
@@ -1,18 +1,22 @@
{
- "name": "backendprototype",
+ "name": "newBackend",
"version": "1.0.0",
- "description": "",
- "main": "index.js",
+ "description": "Express gateway for IoT MVP",
+ "main": "server.js",
+ "type": "commonjs",
"scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
+ "start": "node server.js",
+ "dev": "node --watch server.js",
+ "db:apply": "psql -d $PGDATABASE -f ../sql/schema.sql",
+ "ingest": "node scripts/ingest.js"
},
- "keywords": [],
- "author": "",
- "license": "ISC",
- "type": "commonjs",
"dependencies": {
"cors": "^2.8.5",
- "dotenv": "^17.2.1",
- "express": "^5.1.0"
+ "csv-parse": "^6.1.0",
+ "dotenv": "^17.2.2",
+ "express": "^5.1.0",
+ "fast-csv": "^5.0.5",
+ "multer": "^2.0.2",
+ "pg": "^8.16.3"
}
}
diff --git a/newBackend/scripts/ingest.js b/newBackend/scripts/ingest.js
new file mode 100644
index 0000000..28c65ee
--- /dev/null
+++ b/newBackend/scripts/ingest.js
@@ -0,0 +1,112 @@
+#!/usr/bin/env node
+require('dotenv').config();
+const fs = require('fs');
+const path = require('path');
+const { parse } = require('csv-parse');
+const pool = require('../src/db/pool');
+
+function usage() {
+ console.log('Usage: node scripts/ingest.js --file path/to.csv --map mappings/sensor2.json');
+ process.exit(1);
+}
+
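+// Minimal CLI flag parsing: each "--key value" pair becomes args[key] = value
+// (e.g. --file path/to.csv --map mappings/sensor2.json).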
+const args = Object.fromEntries(process.argv.slice(2).reduce((a,x,i,arr)=>{
+ if (x.startsWith('--')) a.push([x.slice(2), arr[i+1]]);
+ return a;
+}, []));
+if (!args.file || !args.map) usage();
+
+(async () => {
+ const map = JSON.parse(fs.readFileSync(args.map, 'utf8'));
+ const file = path.resolve(args.file);
+
+ const client = await pool.connect();
+ try {
+ await client.query('BEGIN');
+
+ // ensure dataset exists
+ const dsRes = await client.query(
+ 'INSERT INTO datasets(name) VALUES($1) ON CONFLICT(name) DO UPDATE SET name=EXCLUDED.name RETURNING id',
+ [map.datasetName]
+ );
+ const datasetId = dsRes.rows[0].id;
+
+ // prepare CSV parser
+ const parser = fs.createReadStream(file).pipe(parse({ columns: true, skip_empty_lines: true }));
+ const tsCol = map.ts_column;
+ const entityCol = map.entity_column || null;
+ let headerMetrics = null;
+
+ // batch insert
+ const batchSize = 1000;
+ let batch = [];
+ let inserted = 0;
+
+ for await (const row of parser) {
+ const ts = row[tsCol];
+ if (!ts) continue;
+ const entity = entityCol ? (row[entityCol] || null) : null;
+
+ // determine metrics to use
+ if (!headerMetrics) {
+ const allCols = Object.keys(row);
+ const skip = new Set([tsCol, entityCol].filter(Boolean));
+ if (map.metrics === 'auto') {
+ headerMetrics = allCols.filter(c => !skip.has(c));
+ } else {
+ headerMetrics = map.metrics; // explicit list
+ }
+ }
+
+ for (const rawName of headerMetrics) {
+ const metricName = (map.renames && map.renames[rawName]) ? map.renames[rawName] : rawName;
+ const v = row[rawName];
+ const num = v === '' || v == null ? null : Number(v);
+ const quality = !(v === '' || v == null); // boolean flag: true = value present (normal), false = missing; matches the API/frontend contract
+
+ batch.push({
+ dataset_id: datasetId,
+ entity,
+ metric: metricName,
+ ts, // let Postgres parse ISO or standard timestamp
+ value: Number.isFinite(num) ? num : null,
+ quality_flag: quality
+ });
+
+ if (batch.length >= batchSize) {
+ await flush(client, batch);
+ inserted += batch.length;
+ batch = [];
+ }
+ }
+ }
+
+ if (batch.length) {
+ await flush(client, batch);
+ inserted += batch.length;
+ }
+
+ await client.query('COMMIT');
+ console.log(`Ingest done. Rows inserted: ${inserted}`);
+ process.exit(0);
+ } catch (e) {
+ await client.query('ROLLBACK'); // roll back on the same client that began the transaction
+ console.error('Ingest failed:', e);
+ process.exit(1);
+ } finally {
+ client.release();
+ }
+})();
+
+async function flush(client, rows) {
+ // multi-row insert
+ const cols = ['dataset_id','entity','metric','ts','value','quality_flag'];
+ const params = [];
+ const values = rows.map((r, i) => {
+ const off = i*cols.length;
+ params.push(r.dataset_id, r.entity, r.metric, r.ts, r.value, r.quality_flag);
+ return `($${off+1}, $${off+2}, $${off+3}, $${off+4}, $${off+5}, $${off+6})`;
+ }).join(',');
+ const sql = `INSERT INTO timeseries_long(${cols.join(',')}) VALUES ${values}`;
+ await client.query(sql, params);
+}
diff --git a/newBackend/server.js b/newBackend/server.js
new file mode 100644
index 0000000..22f76a5
--- /dev/null
+++ b/newBackend/server.js
@@ -0,0 +1,15 @@
+// newBackend/server.js
+require('dotenv').config();
+const app = require('./src/app');
+
+console.log('DB cfg:', {
+ host: process.env.PGHOST,
+ port: process.env.PGPORT,
+ db: process.env.PGDATABASE,
+ user: process.env.PGUSER
+});
+
+const PORT = process.env.PORT || 3000;
+app.listen(PORT, () => {
+ console.log(`Server running on http://localhost:${PORT}`);
+});
diff --git a/newBackend/src/app.js b/newBackend/src/app.js
new file mode 100644
index 0000000..549b7d4
--- /dev/null
+++ b/newBackend/src/app.js
@@ -0,0 +1,24 @@
+const express = require('express');
+const cors = require('cors');
+
+const logger = require('./middleware/logger');
+const apiRouter = require('./routes');
+
+const app = express();
+
+app.use(cors());
+app.use(express.json());
+app.use(logger);
+
+// Root ping
+app.get('/', (_req, res) => res.send('Backend is running'));
+
+// Mount /api routes
+app.use('/api', apiRouter);
+
+// 404 for unknown routes
+app.use((req, res) => {
+ res.status(404).json({ code: 'not_found', message: 'route not found' });
+});
+
+module.exports = app;
diff --git a/newBackend/src/controllers/analyseController.js b/newBackend/src/controllers/analyseController.js
new file mode 100644
index 0000000..6b890a6
--- /dev/null
+++ b/newBackend/src/controllers/analyseController.js
@@ -0,0 +1,7 @@
+exports.analyse = (req, res) => {
+ // You’ll see this in the terminal
+ console.log('Analyse payload:', req.body);
+
+ // Echo back for now (swap in real logic later)
+ res.json({ ok: true, received: req.body });
+};
diff --git a/newBackend/src/controllers/datasetsController.js b/newBackend/src/controllers/datasetsController.js
new file mode 100644
index 0000000..3909240
--- /dev/null
+++ b/newBackend/src/controllers/datasetsController.js
@@ -0,0 +1,29 @@
+const pool = require('../db/pool');
+
+exports.listDatasets = async (_req, res) => {
+ const q = await pool.query('SELECT name FROM datasets ORDER BY name');
+ res.json({ items: q.rows.map(r => ({ id: r.name, name: r.name })) });
+};
+
+exports.datasetMeta = async (req, res) => {
+ const datasetName = req.params.id;
+ const ds = await pool.query('SELECT id FROM datasets WHERE name=$1', [datasetName]);
+ if (ds.rowCount === 0) return res.status(404).json({ code: 'not_found', message: 'dataset not found' });
+ const datasetId = ds.rows[0].id;
+
+ const fieldsQ = await pool.query(
+ 'SELECT DISTINCT metric FROM timeseries_long WHERE dataset_id=$1 ORDER BY metric',
+ [datasetId]
+ );
+ const boundsQ = await pool.query(
+ 'SELECT MIN(ts) AS start, MAX(ts) AS "end" FROM timeseries_long WHERE dataset_id=$1',
+ [datasetId]
+ );
+
+ res.json({
+ datasetId: datasetName,
+ fieldCount: fieldsQ.rowCount,
+ fields: fieldsQ.rows.map(r => r.metric),
+ timeBounds: boundsQ.rows[0]
+ });
+};
diff --git a/newBackend/BackendCode/controllers/mockController.js b/newBackend/src/controllers/mockController.js
similarity index 96%
rename from newBackend/BackendCode/controllers/mockController.js
rename to newBackend/src/controllers/mockController.js
index 6dfa80f..39f96df 100644
--- a/newBackend/BackendCode/controllers/mockController.js
+++ b/newBackend/src/controllers/mockController.js
@@ -1,55 +1,55 @@
-//handles HTTP request logic for mock data routes
-
-const {
- readProcessedData,
- getAvailableStreamNames,
- filterEntriesByStreamNames
-} = require('../services/mockService');
-
-//GET /streams — Returns JSON file containing the stream data
-const getStreams = (req, res) => {
- try {
- const data = readProcessedData();
- res.json(data);
- } catch (err) {
- console.error('Error reading stream data:', err);
- res.status(500).json({ error: 'Failed to load stream data' });
- }
-};
-
-//Get /stream-names — Returns an array of available stream names
-const getStreamNames = (req, res) => {
- try {
- const streamNames = getAvailableStreamNames();
- if (streamNames.length === 0) {
- return res.status(404).json({ error: "No stream names found" });
- }
- res.json(streamNames);
- } catch (err) {
- console.error('Error getting stream names:', err);
- res.status(500).json({ error: 'Failed to get stream names' });
- }
-};
-
-//POST /filter-streams — Returns JSON file by Filtering entries by stream names (without time window)
-const postFilterStreams = (req, res) => {
- const { streamNames } = req.body;
-
- if (!Array.isArray(streamNames) || streamNames.length === 0) {
- return res.status(400).json({ error: 'streamNames must be a non-empty array' });
- }
-
- try {
- const filtered = filterEntriesByStreamNames(streamNames);
- res.json(filtered);
- } catch (err) {
- console.error('Error filtering stream data:', err);
- res.status(500).json({ error: 'Failed to filter stream data' });
- }
-};
-
-module.exports = {
- getStreams,
- getStreamNames,
- postFilterStreams
+//handles HTTP request logic for mock data routes
+
+const {
+ readProcessedData,
+ getAvailableStreamNames,
+ filterEntriesByStreamNames
+} = require('../services/mockService');
+
+//GET /streams — Returns JSON file containing the stream data
+const getStreams = (req, res) => {
+ try {
+ const data = readProcessedData();
+ res.json(data);
+ } catch (err) {
+ console.error('Error reading stream data:', err);
+ res.status(500).json({ error: 'Failed to load stream data' });
+ }
+};
+
+//Get /stream-names — Returns an array of available stream names
+const getStreamNames = (req, res) => {
+ try {
+ const streamNames = getAvailableStreamNames();
+ if (streamNames.length === 0) {
+ return res.status(404).json({ error: "No stream names found" });
+ }
+ res.json(streamNames);
+ } catch (err) {
+ console.error('Error getting stream names:', err);
+ res.status(500).json({ error: 'Failed to get stream names' });
+ }
+};
+
+//POST /filter-streams — Returns JSON file by Filtering entries by stream names (without time window)
+const postFilterStreams = (req, res) => {
+ const { streamNames } = req.body;
+
+ if (!Array.isArray(streamNames) || streamNames.length === 0) {
+ return res.status(400).json({ error: 'streamNames must be a non-empty array' });
+ }
+
+ try {
+ const filtered = filterEntriesByStreamNames(streamNames);
+ res.json(filtered);
+ } catch (err) {
+ console.error('Error filtering stream data:', err);
+ res.status(500).json({ error: 'Failed to filter stream data' });
+ }
+};
+
+module.exports = {
+ getStreams,
+ getStreamNames,
+ postFilterStreams
};
\ No newline at end of file
diff --git a/newBackend/src/controllers/seriesController.js b/newBackend/src/controllers/seriesController.js
new file mode 100644
index 0000000..a4e9bed
--- /dev/null
+++ b/newBackend/src/controllers/seriesController.js
@@ -0,0 +1,73 @@
+// newBackend/src/controllers/seriesController.js
+const pool = require('../db/pool');
+
+const INTERVALS = new Set(['raw', '5min', '15min', '1h', '6h']);
+
+exports.getSeries = async (req, res) => {
+ // See exactly what the frontend sent:
+ console.log('📥 GET /api/series query:', req.query);
+
+ const { datasetId, stream, interval = 'raw', from, to } = req.query;
+
+ if (!datasetId || !stream || !from || !to) {
+ return res.status(400).json({
+ code: 'bad_request',
+ message: 'datasetId, stream, from, to required'
+ });
+ }
+ if (!INTERVALS.has(interval)) {
+ return res.status(400).json({
+ code: 'bad_interval',
+ message: 'use raw, 5min, 15min, 1h, 6h'
+ });
+ }
+
+ // Resolve dataset id
+ const ds = await pool.query('SELECT id FROM datasets WHERE name=$1', [datasetId]);
+ if (ds.rowCount === 0) {
+ return res.status(404).json({ code: 'not_found', message: 'dataset not found' });
+ }
+ const id = ds.rows[0].id;
+
+ // RAW: include quality_flag so frontend can color anomalies
+ if (interval === 'raw') {
+ const q = `
+ SELECT ts, value, quality_flag
+ FROM timeseries_long
+ WHERE dataset_id = $1
+ AND metric = $2
+ AND ts BETWEEN $3 AND $4
+ ORDER BY ts
+ `;
+ const { rows } = await pool.query(q, [id, stream, from, to]);
+
+ // Optional backend-side count log
+ console.log(`✅ ${datasetId}/${stream} raw → ${rows.length} rows`);
+
+ return res.json({ datasetId, stream, interval, from, to, series: rows });
+ }
+
+ // BUCKETED: average values per bucket (no boolean aggregation yet)
+ const bucketExpr = ({
+ '5min': `date_trunc('minute', ts) - make_interval(mins => EXTRACT(minute FROM ts)::int % 5)`,
+ '15min': `date_trunc('minute', ts) - make_interval(mins => EXTRACT(minute FROM ts)::int % 15)`,
+ '1h': `date_trunc('hour', ts)`,
+ '6h': `date_trunc('hour', ts) - make_interval(hours => EXTRACT(hour FROM ts)::int % 6)`
+ })[interval];
+
+ const q = `
+ WITH b AS (
+ SELECT ${bucketExpr} AS bucket, value
+ FROM timeseries_long
+ WHERE dataset_id=$1 AND metric=$2 AND ts BETWEEN $3 AND $4
+ )
+ SELECT bucket AS ts, AVG(value) AS value
+ FROM b
+ GROUP BY bucket
+ ORDER BY bucket
+ `;
+ const { rows } = await pool.query(q, [id, stream, from, to]);
+
+ console.log(`✅ ${datasetId}/${stream} ${interval} → ${rows.length} rows`);
+ res.json({ datasetId, stream, interval, from, to, series: rows });
+};
diff --git a/newBackend/src/controllers/timestampsController.js b/newBackend/src/controllers/timestampsController.js
new file mode 100644
index 0000000..fb7033c
--- /dev/null
+++ b/newBackend/src/controllers/timestampsController.js
@@ -0,0 +1,30 @@
+const pool = require('../db/pool');
+
+// GET /api/timestamps?datasetId=sensor1&limit=1000
+// Returns: { timestamps: ["2025-03-18T06:54:26.000Z", ...] }
+exports.listTimestamps = async (req, res) => {
+ const { datasetId, limit } = req.query;
+ if (!datasetId) {
+ return res.status(400).json({ code: 'bad_request', message: 'datasetId is required' });
+ }
+
+ const lim = Math.min(Math.max(parseInt(limit || '1000', 10), 1), 10000); // 1..10000
+
+ const ds = await pool.query('SELECT id FROM datasets WHERE name=$1', [datasetId]);
+ if (ds.rowCount === 0) {
+ return res.status(404).json({ code: 'not_found', message: 'dataset not found' });
+ }
+ const id = ds.rows[0].id;
+
+ // Distinct timestamps ordered; capped by "limit"
+ const q = `
+ SELECT DISTINCT ts
+ FROM timeseries_long
+ WHERE dataset_id = $1
+ ORDER BY ts
+ LIMIT $2
+ `;
+ const { rows } = await pool.query(q, [id, lim]);
+
+ res.json({ timestamps: rows.map(r => r.ts.toISOString()) });
+};
diff --git a/newBackend/src/db/pool.js b/newBackend/src/db/pool.js
new file mode 100644
index 0000000..bd941df
--- /dev/null
+++ b/newBackend/src/db/pool.js
@@ -0,0 +1,9 @@
+const { Pool } = require('pg');
+const pool = new Pool({
+ host: process.env.PGHOST,
+ port: process.env.PGPORT,
+ database: process.env.PGDATABASE,
+ user: process.env.PGUSER,
+ password: process.env.PGPASSWORD
+});
+module.exports = pool;
diff --git a/newBackend/src/middleware/logger.js b/newBackend/src/middleware/logger.js
new file mode 100644
index 0000000..647e165
--- /dev/null
+++ b/newBackend/src/middleware/logger.js
@@ -0,0 +1,13 @@
+module.exports = (req, res, next) => {
+ const t0 = Date.now();
+ res.on('finish', () => {
+ console.log(
+ new Date().toISOString(),
+ req.method,
+ req.originalUrl,
+ res.statusCode,
+ (Date.now() - t0) + 'ms'
+ );
+ });
+ next();
+};
diff --git a/newBackend/BackendCode/mock_data/processed_data.json b/newBackend/src/mock_data/processed_data.json
similarity index 100%
rename from newBackend/BackendCode/mock_data/processed_data.json
rename to newBackend/src/mock_data/processed_data.json
diff --git a/newBackend/BackendCode/repositories/mockRepository.js b/newBackend/src/repositories/mockRepository.js
similarity index 96%
rename from newBackend/BackendCode/repositories/mockRepository.js
rename to newBackend/src/repositories/mockRepository.js
index d1f3907..0ce6f2a 100644
--- a/newBackend/BackendCode/repositories/mockRepository.js
+++ b/newBackend/src/repositories/mockRepository.js
@@ -1,24 +1,24 @@
-//handles data access for mock data, reading from local JSON file without a database yet
-
-require('dotenv').config({ path: '../.env' });
-
-const fs = require('fs');
-const path = require('path');
-
-class MockRepository {
- constructor() {
- this.filePath = path.resolve(process.env.PROCESSED_DATA_PATH);
- }
-
- getMockData() {
- try {
- const rawData = fs.readFileSync(this.filePath, 'utf8');
- return JSON.parse(rawData);
- } catch (err) {
- console.error('Error reading mock data:', err);
- throw new Error('Failed to read mock data');
- }
- }
-}
-
+//handles data access for mock data, reading from local JSON file without a database yet
+
+require('dotenv').config({ path: '../.env' });
+
+const fs = require('fs');
+const path = require('path');
+
+class MockRepository {
+ constructor() {
+ this.filePath = path.resolve(process.env.PROCESSED_DATA_PATH);
+ }
+
+ getMockData() {
+ try {
+ const rawData = fs.readFileSync(this.filePath, 'utf8');
+ return JSON.parse(rawData);
+ } catch (err) {
+ console.error('Error reading mock data:', err);
+ throw new Error('Failed to read mock data');
+ }
+ }
+}
+
module.exports = MockRepository;
\ No newline at end of file
diff --git a/newBackend/src/routes/analyseRoutes.js b/newBackend/src/routes/analyseRoutes.js
new file mode 100644
index 0000000..b1cb9e0
--- /dev/null
+++ b/newBackend/src/routes/analyseRoutes.js
@@ -0,0 +1,6 @@
+const router = require('express').Router();
+const { analyse } = require('../controllers/analyseController');
+
+router.post('/analyse', analyse);
+
+module.exports = router;
diff --git a/newBackend/src/routes/datasetsRoutes.js b/newBackend/src/routes/datasetsRoutes.js
new file mode 100644
index 0000000..88caab0
--- /dev/null
+++ b/newBackend/src/routes/datasetsRoutes.js
@@ -0,0 +1,7 @@
+const router = require('express').Router();
+const { listDatasets, datasetMeta } = require('../controllers/datasetsController');
+
+router.get('/datasets', listDatasets); // GET /api/datasets
+router.get('/datasets/:id/meta', datasetMeta); // GET /api/datasets/:id/meta
+
+module.exports = router;
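
Client-side sketch for the two routes above, assuming the router is mounted under /api as the comments indicate and that the list response contains objects with an id (matching the datasets table's primary key).

// sketch: list datasets, then fetch metadata for the first one
async function loadFirstDatasetMeta() {
  const datasets = await fetch('/api/datasets').then(r => r.json());        // GET /api/datasets
  if (!datasets.length) return null;
  return fetch(`/api/datasets/${datasets[0].id}/meta`).then(r => r.json()); // GET /api/datasets/:id/meta
}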
diff --git a/newBackend/src/routes/index.js b/newBackend/src/routes/index.js
new file mode 100644
index 0000000..f0111ed
--- /dev/null
+++ b/newBackend/src/routes/index.js
@@ -0,0 +1,10 @@
+const router = require('express').Router();
+
+//router.use(require('./health.routes')); // GET /api/health
+router.use(require('./analyseRoutes')); // POST /api/analyse
+
+router.use(require('./datasetsRoutes'));
+router.use(require('./seriesRoutes'));
+router.use(require('./timestampsRoutes'));
+
+module.exports = router;
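
How this aggregated router is typically mounted so the GET /api/... comments in the route files hold; the app.js wiring below is a sketch, not part of the patch, and the JSON body parser is an assumption for the POST route.

// wiring sketch: mount the combined router under /api
const express = require('express');
const app = express();
app.use(express.json());              // assumed, so POST /api/analyse can read a JSON body
app.use('/api', require('./routes')); // routes/index.js aggregates analyse, datasets, series, timestamps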
diff --git a/newBackend/BackendCode/routes/mock.js b/newBackend/src/routes/mock.js
similarity index 95%
rename from newBackend/BackendCode/routes/mock.js
rename to newBackend/src/routes/mock.js
index 3f98cda..5c1df89 100644
--- a/newBackend/BackendCode/routes/mock.js
+++ b/newBackend/src/routes/mock.js
@@ -1,78 +1,78 @@
-//handles routing for mock data endpoints
-
-const express = require('express');
-const {
- getStreams,
- getStreamNames,
- postFilterStreams
-} = require('../controllers/mockController');
-
-const router = express.Router();
-
-/*
- * GET /streams
- *
- * Description:
- * Returns the dataset in JSON format,
- * containing all entries including metadata (e.g., created_at, entry_id) and multiple stream values.
- *
- * Example Response:
- * [
- * {
- * "created_at": "2025-03-19T15:01:59.000Z",
- * "entry_id": 3242057,
- * "Temperature": 22,
- * "Voltage Charge": 12.51,
- * "Humidity": 45
- * },
- * ...
- * ]
- */
-router.get('/streams', getStreams);
-
-/*
- * GET /stream-names
- *
- * Description:
- * Returns an array of available stream names (in string format) extracted from the dataset
- *
- * Example Response:
- * [
- * "Temperature",
- * "Voltage Charge",
- * "Humidity",
- * "Current Draw"
- * ]
- */
-router.get("/stream-names", getStreamNames);
-
-/*
- * POST /filter-streams
- * Request Body:
- * {
- * streamNames: [ "Temperature", "Voltage Charge" ]
- * }
- *
- * Description:
- * Returns the specified stream names and timestamp,
- * with the entries in original format.
- *
- * Example Response:
- * [
- * {
- * "created_at": "2025-03-19T15:01:59.000Z",
- * "entry_id": 3242057,
- * "Temperature": 22,
- * "Voltage Charge": 12.51
- * },
- * {
- * "created_at": "2025-03-19T15:02:29.000Z",
- * "entry_id": 3242058,
- * "Temperature": 22,
- * "Voltage Charge": 12.61
- * }
- * ]
- */
-router.post('/filter-streams', postFilterStreams);
-
+//handles routing for mock data endpoints
+
+const express = require('express');
+const {
+ getStreams,
+ getStreamNames,
+ postFilterStreams
+} = require('../controllers/mockController');
+
+const router = express.Router();
+
+/*
+ * GET /streams
+ *
+ * Description:
+ * Returns the dataset in JSON format,
+ * containing all entries including metadata (e.g., created_at, entry_id) and multiple stream values.
+ *
+ * Example Response:
+ * [
+ * {
+ * "created_at": "2025-03-19T15:01:59.000Z",
+ * "entry_id": 3242057,
+ * "Temperature": 22,
+ * "Voltage Charge": 12.51,
+ * "Humidity": 45
+ * },
+ * ...
+ * ]
+ */
+router.get('/streams', getStreams);
+
+/*
+ * GET /stream-names
+ *
+ * Description:
+ * Returns an array of available stream names (in string format) extracted from the dataset
+ *
+ * Example Response:
+ * [
+ * "Temperature",
+ * "Voltage Charge",
+ * "Humidity",
+ * "Current Draw"
+ * ]
+ */
+router.get("/stream-names", getStreamNames);
+
+/*
+ * POST /filter-streams
+ * Request Body:
+ * {
+ * streamNames: [ "Temperature", "Voltage Charge" ]
+ * }
+ *
+ * Description:
+ * Returns, for each entry, only the requested streams together with created_at and entry_id,
+ * preserving the original entry structure.
+ *
+ * Example Response:
+ * [
+ * {
+ * "created_at": "2025-03-19T15:01:59.000Z",
+ * "entry_id": 3242057,
+ * "Temperature": 22,
+ * "Voltage Charge": 12.51
+ * },
+ * {
+ * "created_at": "2025-03-19T15:02:29.000Z",
+ * "entry_id": 3242058,
+ * "Temperature": 22,
+ * "Voltage Charge": 12.61
+ * }
+ * ]
+ */
+router.post('/filter-streams', postFilterStreams);
+
module.exports = router;
\ No newline at end of file
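
Usage sketch for the documented POST /filter-streams endpoint; the request body and response shape follow the comments above, while the router's mount path is assumed to be the app root.

// sketch: request only Temperature and Voltage Charge
async function loadTemperatureAndVoltage() {
  const res = await fetch('/filter-streams', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ streamNames: ['Temperature', 'Voltage Charge'] })
  });
  return res.json(); // [{ created_at, entry_id, Temperature, "Voltage Charge" }, ...]
}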
diff --git a/newBackend/src/routes/seriesRoutes.js b/newBackend/src/routes/seriesRoutes.js
new file mode 100644
index 0000000..046a079
--- /dev/null
+++ b/newBackend/src/routes/seriesRoutes.js
@@ -0,0 +1,6 @@
+const router = require('express').Router();
+const { getSeries } = require('../controllers/seriesController');
+
+router.get('/series', getSeries); // GET /api/series?datasetId=&stream=&interval=&from=&to=
+
+module.exports = router;
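
A sketch of building the query string documented on the route above; all parameter values, including the interval label, are examples rather than values defined by this patch.

// sketch: GET /api/series with the documented query parameters
const params = new URLSearchParams({
  datasetId: '1',
  stream: 'Temperature',
  interval: 'hourly',
  from: '2025-03-19T00:00:00Z',
  to: '2025-03-20T00:00:00Z'
});
fetch(`/api/series?${params}`)
  .then(r => r.json())
  .then(series => console.log(series.length));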
diff --git a/newBackend/src/routes/timestampsRoutes.js b/newBackend/src/routes/timestampsRoutes.js
new file mode 100644
index 0000000..312eec5
--- /dev/null
+++ b/newBackend/src/routes/timestampsRoutes.js
@@ -0,0 +1,6 @@
+const router = require('express').Router();
+const { listTimestamps } = require('../controllers/timestampsController');
+
+router.get('/timestamps', listTimestamps);
+
+module.exports = router;
diff --git a/newBackend/BackendCode/services/mockService.js b/newBackend/src/services/mockService.js
similarity index 96%
rename from newBackend/BackendCode/services/mockService.js
rename to newBackend/src/services/mockService.js
index 3604b32..9308d71 100644
--- a/newBackend/BackendCode/services/mockService.js
+++ b/newBackend/src/services/mockService.js
@@ -1,42 +1,42 @@
-//handles the logic for processing mock data, using the repository for data access
-
-const MockRepository = require('../repositories/mockRepository');
-const mockRepository = new MockRepository();
-
-//get all entries from the .json file
-const readProcessedData = () => {
- return mockRepository.getMockData();
-};
-
-const getAvailableStreamNames = () => {
- const entries = mockRepository.getMockData();
- if (!entries || entries.length === 0) return [];
-
- const excludedKeys = ["created_at", "entry_id", "was_interpolated"];
- return Object.keys(entries[0]).filter(key => !excludedKeys.includes(key));
-};
-
-const filterEntriesByStreamNames = (streamNames) => {
- const entries = mockRepository.getMockData();
-
- return entries.map(entry => {
- const filteredEntry = {
- created_at: entry.created_at,
- entry_id: entry.entry_id
- };
-
- streamNames.forEach(name => {
- if (entry[name] !== undefined) {
- filteredEntry[name] = entry[name];
- }
- });
-
- return filteredEntry;
- });
-};
-
-module.exports = {
- readProcessedData,
- getAvailableStreamNames,
- filterEntriesByStreamNames
+//handles the logic for processing mock data, using the repository for data access
+
+const MockRepository = require('../repositories/mockRepository');
+const mockRepository = new MockRepository();
+
+//get all entries from the .json file
+const readProcessedData = () => {
+ return mockRepository.getMockData();
+};
+
+const getAvailableStreamNames = () => {
+ const entries = mockRepository.getMockData();
+ if (!entries || entries.length === 0) return [];
+
+ const excludedKeys = ["created_at", "entry_id", "was_interpolated"];
+ return Object.keys(entries[0]).filter(key => !excludedKeys.includes(key));
+};
+
+const filterEntriesByStreamNames = (streamNames) => {
+ const entries = mockRepository.getMockData();
+
+ return entries.map(entry => {
+ const filteredEntry = {
+ created_at: entry.created_at,
+ entry_id: entry.entry_id
+ };
+
+ streamNames.forEach(name => {
+ if (entry[name] !== undefined) {
+ filteredEntry[name] = entry[name];
+ }
+ });
+
+ return filteredEntry;
+ });
+};
+
+module.exports = {
+ readProcessedData,
+ getAvailableStreamNames,
+ filterEntriesByStreamNames
};
\ No newline at end of file
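
A sketch of what the two helpers return for entries shaped like the documented /streams response; the require path is illustrative.

// sketch: exercising the mock service against the bundled JSON data
const {
  getAvailableStreamNames,
  filterEntriesByStreamNames
} = require('./services/mockService'); // path relative to newBackend/src, illustrative

// drops created_at, entry_id and was_interpolated, e.g. ['Temperature', 'Voltage Charge', 'Humidity']
console.log(getAvailableStreamNames());

// keeps created_at, entry_id and only the requested stream on every entry
console.log(filterEntriesByStreamNames(['Temperature'])[0]);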
diff --git a/sql/schema.sql b/sql/schema.sql
new file mode 100644
index 0000000..44333a6
--- /dev/null
+++ b/sql/schema.sql
@@ -0,0 +1,37 @@
+-- 1) Datasets
+CREATE TABLE IF NOT EXISTS datasets (
+ id SERIAL PRIMARY KEY,
+ name TEXT NOT NULL UNIQUE,
+ description TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+
+-- 2) Long-format readings (works for any columns)
+CREATE TABLE IF NOT EXISTS timeseries_long (
+ id BIGSERIAL PRIMARY KEY,
+ dataset_id INT NOT NULL REFERENCES datasets(id) ON DELETE CASCADE,
+ entity TEXT, -- sensor/device id (nullable)
+ metric TEXT NOT NULL, -- e.g., field/measurement name
+ ts TIMESTAMPTZ NOT NULL, -- UTC timestamp
+ value DOUBLE PRECISION, -- numeric value
+ quality_flag TEXT NOT NULL DEFAULT 'ok'
+ CHECK (quality_flag IN ('ok','imputed','missing'))
+);
+CREATE INDEX IF NOT EXISTS idx_ts_long_ds_metric_ts ON timeseries_long(dataset_id, metric, ts);
+CREATE INDEX IF NOT EXISTS idx_ts_long_ds_entity_ts ON timeseries_long(dataset_id, entity, ts);
+
+-- 3) (Optional) Store anomaly results
+CREATE TABLE IF NOT EXISTS anomalies (
+ id BIGSERIAL PRIMARY KEY,
+ dataset_id INT NOT NULL REFERENCES datasets(id) ON DELETE CASCADE,
+ entity TEXT,
+ metric TEXT NOT NULL,
+ ts TIMESTAMPTZ NOT NULL,
+ value DOUBLE PRECISION,
+ score DOUBLE PRECISION NOT NULL,
+ label TEXT,
+ algo_version TEXT,
+ params JSONB,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+CREATE INDEX IF NOT EXISTS idx_anoms_ds_metric_ts ON anomalies(dataset_id, metric, ts);
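
A sketch of how a series query might read from timeseries_long through the pool added earlier, bucketing by hour with date_trunc; the hour granularity and the require path are assumptions, while the columns and index come from the schema above.

// sketch: hourly averages for one metric of one dataset, served by idx_ts_long_ds_metric_ts
const pool = require('../newBackend/src/db/pool'); // path illustrative

async function hourlySeries(datasetId, metric, from, to) {
  const { rows } = await pool.query(
    `SELECT date_trunc('hour', ts) AS bucket, AVG(value) AS value
       FROM timeseries_long
      WHERE dataset_id = $1 AND metric = $2 AND ts BETWEEN $3 AND $4
      GROUP BY bucket
      ORDER BY bucket`,
    [datasetId, metric, from, to]
  );
  return rows;
}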