64 changes: 64 additions & 0 deletions BackendCode/README.md
@@ -0,0 +1,64 @@
# IoT Backend API

A Node.js Express API for the Intelligent IoT Data Management Platform.

## Quick Start

1. **Install Dependencies:**
```bash
npm install
```

2. **Start the Server:**
```bash
npm start
```
Or use the startup script:
```bash
node start.js
```

3. **Test the API:**
```bash
curl http://localhost:3000/api/streams
```

## API Endpoints

- `GET /api/streams` - Get all sensor data
- `GET /api/stream-names` - Get available stream names
- `POST /api/filter-streams` - Filter data by stream names (see the example request below)
- `POST /api/analyze` - Run correlation analysis
- `POST /api/analyze-csv` - Return a cleaned/interpolated CSV file
- `POST /api/analyze-corr` - Get correlation matrix
- `POST /api/visualize` - Generate visualizations
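
For example, a filter request might look like this minimal sketch (the `Temperature` stream name is the sample used in the tests; check `GET /api/stream-names` for the names available in your dataset):

```js
// Hedged example using Node 18+'s built-in fetch; adjust the stream name to your data.
async function filterExample() {
  const res = await fetch('http://localhost:3000/api/filter-streams', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ streamNames: ['Temperature'] }),
  });
  console.log(await res.json());
}

filterExample();
```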

## Project Structure

```
BackendCode/
├── controllers/ # Request handlers
├── services/ # Business logic
├── repositories/ # Data access
├── middleware/ # Request validation
├── validation/ # Zod schemas
├── dtos/ # Data transfer objects
├── mock_data/ # Sample data files
└── __tests__/ # Unit tests
```

## Dependencies

- **express** - Web framework (wiring sketched below)
- **cors** - Cross-origin resource sharing (CORS) middleware
- **zod** - Request schema validation
- **dotenv** - Loads environment variables from `.env`
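
A minimal sketch of how these pieces typically fit together in the server entry point (illustrative only; the actual `server.js` wiring may differ, and `zod` is used by the `validation/` schemas rather than here):

```js
// Illustrative wiring only; not the project's actual server.js.
require('dotenv').config();   // load environment variables from .env
const express = require('express');
const cors = require('cors');

const app = express();
app.use(cors());              // allow cross-origin requests from the frontend
app.use(express.json());      // parse JSON request bodies

// Route registration would go here, e.g. app.use('/api', require('./routes'));

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`API listening on port ${PORT}`));
```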

## Development

- **nodemon** - Auto-restart on changes
- **jest** - Testing framework
- **supertest** - API testing (see the snippet below)
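
Tests run with `jest` and exercise the API through `supertest`; the snippet below mirrors the first case in `__tests__/api.test.js`:

```js
// Pattern taken from __tests__/api.test.js (adjust the server path if needed)
const request = require('supertest');
const app = require('../server');

it('GET /api/streams should return an array of stream entries', async () => {
  const res = await request(app).get('/api/streams');
  expect(res.statusCode).toBe(200);
  expect(Array.isArray(res.body)).toBe(true);
});
```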

## Data Sources

The API serves mock data from `mock_data/processed_data.json` during development; it contains sample IoT sensor readings for testing the frontend integration.
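
A minimal sketch of what `readProcessedData` in `services/mockService.js` might look like (the actual implementation may differ):

```js
// Minimal sketch only; the real services/mockService.js may differ.
const fs = require('fs');
const path = require('path');

const DATA_PATH = path.join(__dirname, '..', 'mock_data', 'processed_data.json');

function readProcessedData() {
  // Read the sample IoT sensor readings and parse them into plain objects
  return JSON.parse(fs.readFileSync(DATA_PATH, 'utf8'));
}
```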
83 changes: 83 additions & 0 deletions BackendCode/__tests__/api.test.js
@@ -0,0 +1,83 @@
const request = require('supertest');
const app = require('../server'); // Adjust the path if needed
require('dotenv').config({ path: './.env' });
describe('API Endpoint Integration Tests', () => {
  // Consider setting up a test database or mock data before running these tests

  it('GET /api/streams should return an array of stream entries', async () => {
    const res = await request(app).get('/api/streams');
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('GET /api/stream-names should return array of available stream names', async () => {
    const res = await request(app).get('/api/stream-names');
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('POST /api/filter-streams should return filtered stream entries', async () => {
    // Adjust streamNames below to match your sample data
    const res = await request(app)
      .post('/api/filter-streams')
      .send({ streamNames: ['Temperature'] });
    expect(res.statusCode).toBe(200);
    expect(Array.isArray(res.body)).toBe(true);
  });

  it('POST /api/analyze should return analysis result', async () => {
    // Use valid streams and time range based on your dataset
    const res = await request(app)
      .post('/api/analyze')
      .send({
        streams: ['s1', 's2', 's3'],
        start_date: "2025-01-01T00:00:00",
        end_date: "2025-01-06T00:10:00",
        threshold: 0.5,
        algo_type: "correlation"
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
  });

  it('POST /api/analyze-csv should return a cleaned/interpolated CSV file', async () => {
    const res = await request(app)
      .post('/api/analyze-csv')
      .send({ window_size: 15 });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
    if (res.statusCode === 200) {
      expect(res.type).toBe('text/csv');
    }
  });

  it('POST /api/analyze-corr should return correlation matrix', async () => {
    const res = await request(app)
      .post('/api/analyze-corr')
      .send({
        streams: ['s1', 's2', 's3'],
        start_date: "2025-01-01T00:00:00",
        end_date: "2025-01-06T00:10:00",
        threshold: 0.5
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
  });

  it('POST /api/visualize should return a base64-encoded image string', async () => {
    const res = await request(app)
      .post('/api/visualize')
      .send({
        streams: ['field1', 'field2', 'field3'],
        start_date: "2025-03-18T06:55:31",
        end_date: "2025-03-18T07:00:43",
        type: "grouped_bar_chart"
      });
    // Accept 200 or error if Python service is unavailable
    expect([200, 500]).toContain(res.statusCode);
    if (res.statusCode === 200) {
      expect(res.body).toHaveProperty('image');
      expect(typeof res.body.image).toBe('string');
    }
  });
});
@@ -1,55 +1,52 @@
//handles HTTP request logic for mock data routes

const {
  readProcessedData,
  getAvailableStreamNames,
  filterEntriesByStreamNames
} = require('../services/mockService');

//GET /streams — Returns JSON file containing the stream data
const getStreams = (req, res) => {
  try {
    const data = readProcessedData();
    res.json(data);
  } catch (err) {
    console.error('Error reading stream data:', err);
    res.status(500).json({ error: 'Failed to load stream data' });
  }
};

//GET /stream-names — Returns an array of available stream names
const getStreamNames = (req, res) => {
  try {
    const streamNames = getAvailableStreamNames();
    if (streamNames.length === 0) {
      return res.status(404).json({ error: "No stream names found" });
    }
    res.json(streamNames);
  } catch (err) {
    console.error('Error getting stream names:', err);
    res.status(500).json({ error: 'Failed to get stream names' });
  }
};

//POST /filter-streams — Returns entries filtered by stream names (without time window)
const postFilterStreams = (req, res) => {
  const { streamNames } = req.body;

  if (!Array.isArray(streamNames) || streamNames.length === 0) {
    return res.status(400).json({ error: 'streamNames must be a non-empty array' });
  }

  try {
    const filtered = filterEntriesByStreamNames(streamNames);
    res.json(filtered);
  } catch (err) {
    console.error('Error filtering stream data:', err);
    res.status(500).json({ error: 'Failed to filter stream data' });
  }
};

module.exports = {
  getStreams,
  getStreamNames,
  postFilterStreams
// Handles HTTP request logic for mock data routes

const {
  readProcessedData,
  getAvailableStreamNames,
  filterEntriesByStreamNames
} = require('../services/mockService');
const { mapEntriesDto } = require('../dtos');

// GET /streams — Returns the stream data as JSON
const getStreams = (req, res) => {
  try {
    const data = readProcessedData();
    res.json(mapEntriesDto(data));
  } catch (err) {
    console.error('Error reading stream data:', err);
    res.status(500).json({ error: 'Failed to load stream data' });
  }
};

// GET /stream-names — Returns an array of available stream names
const getStreamNames = (req, res) => {
  try {
    const streamNames = getAvailableStreamNames();
    if (streamNames.length === 0) {
      return res.status(404).json({ error: "No stream names found" });
    }
    res.json(streamNames);
  } catch (err) {
    console.error('Error getting stream names:', err);
    res.status(500).json({ error: 'Failed to get stream names' });
  }
};

// POST /filter-streams — Returns entries filtered by stream names (without time window)
const postFilterStreams = (req, res) => {
  const { streamNames } = req.validatedBody || req.body;

  try {
    const filtered = filterEntriesByStreamNames(streamNames);
    res.json(mapEntriesDto(filtered));
  } catch (err) {
    console.error('Error filtering stream data:', err);
    res.status(500).json({ error: 'Failed to filter stream data' });
  }
};

module.exports = {
  getStreams,
  getStreamNames,
  postFilterStreams
};
61 changes: 61 additions & 0 deletions BackendCode/controllers/pythonController.js
@@ -0,0 +1,61 @@
// handles HTTP request logic for Python-backed endpoints

const {
  analyze,
  visualize,
  analyzeCsv,
  analyzeCorr
} = require('../services/pythonService');

// POST /analyze — correlation analysis
const postAnalyze = async (req, res) => {
  try {
    const result = await analyze(req.validatedBody || req.body);
    res.json(result);
  } catch (err) {
    console.error('Analyze error:', err.message);
    res.status(500).json({ error: 'Failed to perform analysis' });
  }
};

// POST /visualize — returns base64 image
const postVisualize = async (req, res) => {
  try {
    const result = await visualize(req.validatedBody || req.body);
    res.json(result);
  } catch (err) {
    console.error('Visualization error:', err.message);
    res.status(500).json({ error: 'Failed to generate visualization' });
  }
};

// POST /analyze-csv — returns cleaned CSV
const postAnalyzeCsv = async (req, res) => {
  try {
    const result = await analyzeCsv(req.validatedBody || req.body);
    res.setHeader('Content-Disposition', 'attachment; filename=report.csv');
    res.setHeader('Content-Type', 'text/csv');
    res.send(result);
  } catch (err) {
    console.error('Analyze CSV error:', err.message);
    res.status(500).json({ error: 'Failed to analyze CSV' });
  }
};

// POST /analyze-corr — returns correlation matrix
const postAnalyzeCorr = async (req, res) => {
  try {
    const result = await analyzeCorr(req.validatedBody || req.body);
    res.json(result);
  } catch (err) {
    console.error('Analyze Corr error:', err.message);
    res.status(500).json({ error: 'Failed to perform correlation analysis' });
  }
};

module.exports = {
  postAnalyze,
  postVisualize,
  postAnalyzeCsv,
  postAnalyzeCorr
};
4 changes: 4 additions & 0 deletions BackendCode/data/mock.json
@@ -0,0 +1,4 @@
[
{ "id": 1, "name": "Stream A", "value": 123 },
{ "id": 2, "name": "Stream B", "value": 456 }
]