Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,8 @@
PROCESSED_DATA_PATH=./mock_data/processed_data.json

# Path to raw CSV dataset
RAW_CSV_PATH=./mock_data/2881821.csv
RAW_CSV_PATH=./mock_data/2881821.csv
# WARNING: duplicate keys — dotenv keeps the LAST occurrence within a file,
# so these two lines override the PROCESSED_DATA_PATH / RAW_CSV_PATH values
# defined above. Keep a single definition per key.
PROCESSED_DATA_PATH=BackendCode/data/mock.json
RAW_CSV_PATH=./path/to/your/raw.csv
PYTHON_URL=http://localhost:5000
PORT=3000
5 changes: 5 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
PORT=3000
DATASETS_PATH=./data/datasets.json
DEVICES_PATH=./data/devices.json
SENSORSTREAMS_PATH=./data/sensorstreams.json
SENSORDATA_PATH=./data/sensordata.json
3 changes: 3 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"CodeGPT.apiKey": "CodeGPT Plus Beta"
}
83 changes: 83 additions & 0 deletions BackendCode/__tests__/api.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
// Load environment variables BEFORE requiring the app: server.js reads
// process.env (e.g. PORT, data paths) at import time, so configuring dotenv
// after the require means those values are not applied to the app under test.
require('dotenv').config({ path: './.env' });

const request = require('supertest');
const app = require('../server'); // Adjust the path if needed

describe('API Endpoint Integration Tests', () => {
  // Consider setting up a test database or mock data before running these tests

  it('GET /api/streams should return an array of stream entries', async () => {
    const res = await request(app).get('/api/streams');
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('GET /api/stream-names should return array of available stream names', async () => {
    const res = await request(app).get('/api/stream-names');
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('POST /api/filter-streams should return filtered stream entries', async () => {
    // Adjust streamNames below to match your sample data
    const res = await request(app)
      .post('/api/filter-streams')
      .send({ streamNames: ['Temperature'] });
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('POST /api/analyze should return analysis result', async () => {
    // Use valid streams and time range based on your dataset
    const res = await request(app)
      .post('/api/analyze')
      .send({
        streams: ['s1', 's2', 's3'],
        start_date: "2025-01-01T00:00:00",
        end_date: "2025-01-06T00:10:00",
        threshold: 0.5,
        algo_type: "correlation"
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
  });

  it('POST /api/analyze-csv should return a cleaned/interpolated CSV file', async () => {
    const res = await request(app)
      .post('/api/analyze-csv')
      .send({ window_size: 15 });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
    if (res.statusCode === 200) {
      expect(res.type).toBe('text/csv');
    }
  });

  it('POST /api/analyze-corr should return correlation matrix', async () => {
    const res = await request(app)
      .post('/api/analyze-corr')
      .send({
        streams: ['s1', 's2', 's3'],
        start_date: "2025-01-01T00:00:00",
        end_date: "2025-01-06T00:10:00",
        threshold: 0.5
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
  });

  it('POST /api/visualize should return a base64-encoded image string', async () => {
    const res = await request(app)
      .post('/api/visualize')
      .send({
        streams: ['field1', 'field2', 'field3'],
        start_date: "2025-03-18T06:55:31",
        end_date: "2025-03-18T07:00:43",
        type: "grouped_bar_chart"
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
    if (res.statusCode === 200) {
      expect(res.body).toHaveProperty('image');
      expect(typeof res.body.image).toBe('string');
    }
  });
});
11 changes: 4 additions & 7 deletions BackendCode/controllers/mockController.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@ const {
getAvailableStreamNames,
filterEntriesByStreamNames
} = require('../services/mockService');
const { mapEntriesDto } = require('../dtos');

//GET /streams — Returns JSON file containing the stream data
const getStreams = (req, res) => {
try {
const data = readProcessedData();
res.json(data);
res.json(mapEntriesDto(data));
} catch (err) {
console.error('Error reading stream data:', err);
res.status(500).json({ error: 'Failed to load stream data' });
Expand All @@ -33,15 +34,11 @@ const getStreamNames = (req, res) => {

//POST /filter-streams — Returns JSON file by Filtering entries by stream names (without time window)
const postFilterStreams = (req, res) => {
const { streamNames } = req.body;

if (!Array.isArray(streamNames) || streamNames.length === 0) {
return res.status(400).json({ error: 'streamNames must be a non-empty array' });
}
const { streamNames } = req.validatedBody || req.body;

try {
const filtered = filterEntriesByStreamNames(streamNames);
res.json(filtered);
res.json(mapEntriesDto(filtered));
} catch (err) {
console.error('Error filtering stream data:', err);
res.status(500).json({ error: 'Failed to filter stream data' });
Expand Down
8 changes: 4 additions & 4 deletions BackendCode/controllers/pythonController.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ const {
// POST /analyze — correlation analysis
const postAnalyze = async (req, res) => {
try {
const result = await analyze(req.body);
const result = await analyze(req.validatedBody || req.body);
res.json(result);
} catch (err) {
console.error('Analyze error:', err.message);
Expand All @@ -21,7 +21,7 @@ const postAnalyze = async (req, res) => {
// POST /visualize — returns base64 image
const postVisualize = async (req, res) => {
try {
const result = await visualize(req.body);
const result = await visualize(req.validatedBody || req.body);
res.json(result);
} catch (err) {
console.error('Visualization error:', err.message);
Expand All @@ -32,7 +32,7 @@ const postVisualize = async (req, res) => {
// POST /analyze-csv — returns cleaned CSV
const postAnalyzeCsv = async (req, res) => {
try {
const result = await analyzeCsv(req.body);
const result = await analyzeCsv(req.validatedBody || req.body);
res.setHeader('Content-Disposition', 'attachment; filename=report.csv');
res.setHeader('Content-Type', 'text/csv');
res.send(result);
Expand All @@ -45,7 +45,7 @@ const postAnalyzeCsv = async (req, res) => {
// POST /analyze-corr — returns correlation matrix
const postAnalyzeCorr = async (req, res) => {
try {
const result = await analyzeCorr(req.body);
const result = await analyzeCorr(req.validatedBody || req.body);
res.json(result);
} catch (err) {
console.error('Analyze Corr error:', err.message);
Expand Down
4 changes: 4 additions & 0 deletions BackendCode/data/mock.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[
{ "id": 1, "name": "Stream A", "value": 123 },
{ "id": 2, "name": "Stream B", "value": 456 }
]
21 changes: 21 additions & 0 deletions BackendCode/dtos/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
// Simple DTO mappers to keep outputs consistent

// For streams and filtered streams, ensure keys are ordered and explicit
// Normalize a single stream entry so `created_at` and `entry_id` always lead
// the payload, any extra fields follow in their original order, and
// `was_interpolated` (only when it is actually present) trails the object.
const mapEntryDto = (entry) => {
  const { created_at, entry_id, was_interpolated, ...extraFields } = entry;
  const dto = { created_at, entry_id, ...extraFields };
  if (was_interpolated !== undefined) {
    dto.was_interpolated = was_interpolated;
  }
  return dto;
};

// Apply the per-entry normalization across a whole collection.
const mapEntriesDto = (entries) => entries.map(mapEntryDto);

module.exports = {
mapEntryDto,
mapEntriesDto
};


1 change: 1 addition & 0 deletions BackendCode/jest.setup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
require('dotenv').config({ path: './.env' });
30 changes: 30 additions & 0 deletions BackendCode/middleware/validateRequest.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
// reusable request validation middleware powered by Zod

// Reusable request-validation middleware for Zod-style schemas.
//
// Accepts an object with optional `params`, `query`, and `body` schemas, each
// exposing a `.parse()` method (e.g. a Zod schema). Parsed (sanitized) values
// are stored on req.validatedParams / req.validatedQuery / req.validatedBody
// so handlers can prefer them over the raw request payload.
//
// Validation failures respond 400 with the schema's issue list. Any other
// thrown error is a server-side bug, not a client error, so it is forwarded
// to the Express error handler instead of being masked as a 400.
const validateRequest = ({ params, query, body } = {}) => {
  return (req, res, next) => {
    try {
      if (params) {
        req.validatedParams = params.parse(req.params);
      }
      if (query) {
        req.validatedQuery = query.parse(req.query);
      }
      if (body) {
        req.validatedBody = body.parse(req.body);
      }
      next();
    } catch (err) {
      // Zod errors expose their problems as an array (`issues`, with
      // `errors` as a legacy alias); treat either as a validation failure.
      const issues = err ? err.issues ?? err.errors : undefined;
      if (Array.isArray(issues)) {
        return res.status(400).json({
          error: 'ValidationError',
          issues: issues.map((e) => ({ path: e.path, message: e.message }))
        });
      }
      // Unknown error type: do not swallow it as "Invalid request".
      return next(err);
    }
  };
};

module.exports = { validateRequest };


22 changes: 16 additions & 6 deletions BackendCode/routes/mock.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,16 @@ const {
} = require('../controllers/pythonController');

const router = express.Router();
const { z } = require('zod');
const { validateRequest } = require('../middleware/validateRequest');
const {
getStreamsSchema,
getStreamNamesSchema,
filterStreamsBody,
analyzeBody,
analyzeCsvBody,
analyzeCorrBody
} = require('../validation/schemas');

/*
* GET /streams
Expand All @@ -35,7 +45,7 @@ const router = express.Router();
* ...
* ]
*/
router.get('/streams', getStreams);
router.get('/streams', validateRequest(getStreamsSchema), getStreams);

/*
* GET /stream-names
Expand All @@ -51,7 +61,7 @@ router.get('/streams', getStreams);
* "Current Draw"
* ]
*/
router.get("/stream-names", getStreamNames);
router.get('/stream-names', validateRequest(getStreamNamesSchema), getStreamNames);

/*
* POST /filter-streams
Expand Down Expand Up @@ -80,7 +90,7 @@ router.get("/stream-names", getStreamNames);
* }
* ]
*/
router.post('/filter-streams', postFilterStreams);
router.post('/filter-streams', validateRequest({ body: filterStreamsBody }), postFilterStreams);

/*
* POST /analyze
Expand All @@ -107,7 +117,7 @@ router.post('/filter-streams', postFilterStreams);
* }
* }
*/
router.post('/analyze', postAnalyze);
router.post('/analyze', validateRequest({ body: analyzeBody }), postAnalyze);

/*
* POST /analyze-csv
Expand All @@ -128,7 +138,7 @@ router.post('/analyze', postAnalyze);
* - Generated timestamps
* - Cleaned structure with no empty rows or columns
*/
router.post('/analyze-csv', postAnalyzeCsv);
router.post('/analyze-csv', validateRequest({ body: analyzeCsvBody }), postAnalyzeCsv);

/*
* POST /analyze-corr
Expand Down Expand Up @@ -157,7 +167,7 @@ router.post('/analyze-csv', postAnalyzeCsv);
* ]
* }
*/
router.post('/analyze-corr', postAnalyzeCorr); //not working yet, need to test
router.post('/analyze-corr', validateRequest({ body: analyzeCorrBody }), postAnalyzeCorr); //not working yet, need to test

/*
* POST /visualize
Expand Down
3 changes: 2 additions & 1 deletion BackendCode/server.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@ app.use('/api', mockRoutes);
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
console.log(`Server running on http://localhost:${PORT}`);
});
});
module.exports = app;
48 changes: 48 additions & 0 deletions BackendCode/validation/schemas.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
const { z } = require('zod');

// Common schemas
const isoDateString = z.string().datetime({ offset: true }).describe('ISO datetime with timezone offset');

// GET /streams — no input
const getStreamsSchema = {};

// GET /stream-names — no input
const getStreamNamesSchema = {};

// POST /filter-streams
const filterStreamsBody = z.object({
streamNames: z.array(z.string().min(1)).min(1, 'At least one stream name is required')
});

// POST /analyze
const analyzeBody = z.object({
streams: z.array(z.string()).min(1),
start_date: isoDateString,
end_date: isoDateString,
threshold: z.number().min(0).max(1),
algo_type: z.enum(['correlation']).default('correlation')
});

// POST /analyze-csv
const analyzeCsvBody = z.object({
window_size: z.number().int().positive().optional()
});

// POST /analyze-corr
const analyzeCorrBody = z.object({
streams: z.array(z.string()).min(2),
start_date: isoDateString,
end_date: isoDateString,
threshold: z.number().min(0).max(1).optional()
});

module.exports = {
getStreamsSchema,
getStreamNamesSchema,
filterStreamsBody,
analyzeBody,
analyzeCsvBody,
analyzeCorrBody
};


Loading