diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..ed60f1f4 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +node_modules +npm-debug.log +Dockerfile +docker-compose.yml +.vscode +.git +.gitignore +.env \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..5a78a6a5 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Predict + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 00000000..13c0c78d --- /dev/null +++ b/README.md @@ -0,0 +1,97 @@ +# PREDICT +Predict contiene un modelo preentrenado con ```TensorFlow```. Una vez confirma que el modelo está correctamente cargado y que los datos tienen el tamaño correcto (el que debe devolver acquire), se ejecuta obteniendo una predicción del consumo de energía. Este resultado se almacena en ```MongoDB```. 
+ +## Repositorios del proyecto +```bash +https://github.com/ppf30/acquire.git +``` +```bash +https://github.com/ppf30/orchestrator.git +``` +```bash +https://github.com/ppf30/predict.git +``` + + +## Uso Local + +```bash +# Iniciar el servicio predict +node server.js +``` + +## Uso docker +Todo el proyecto está dockerizado, por lo tanto si queremos probarlo con contenedores debemos clonar los repositorios y con el ```docker-compose.yml``` en la carpeta, ejecutamos los siguientes comandos en la terminal: +```bash +docker-compose up -d --build +``` + +Al finalizar podemos eliminar los contenedores: +```bash +docker-compose down +``` +## Pruebas en Postman +GET http://localhost:3001/health + +GET http://localhost:3001/ready + +POST http://localhost:3001/predict +```bash +# Body +{ + "features": [1.315, 1.81, 1.27, 8, 0, 9, 30], + "meta": { + "source": "orchestrator", + "dataId": "6772c1f3e2a0b12345678901", + "featureCount": 7, + "scalerVersion": "v1", + "targetDate": "2025-11-26T22:00:00.000Z", + "dailyValues": [28.186, 27.809, 27.44], + "kunnaMeta": { + "alias": "6339651", + "name": "1d", + "daysUsed": ["2025-11-25", "2025-11-24", "2025-11-23"] + }, + "fetchMeta": { + "timeStart": "2025-11-22T18:43:10.000Z", + "timeEnd": "2025-11-25T18:43:10.000Z" + } + } +} +``` + + + + +## Lenguaje + +* Todo el código está en JavaScript + +## Estructura del Proyecto + +``` +predict/ +│── controllers/ +│──│── predictControllers.js +│── model/ +│──│── group1-shard1of1.bin +│──│── model.json +│──│── Prediction.js +│── node_modules/ +│── routes/ +│──│── predictRoutes.js +│── services/ +│──│── tfModelService.js +│── dockerfile +│── package-lock.json +│── package.json +│── server_all.js +│── server.js +│── README.md + +``` + + +## Licencia + +Este proyecto está bajo la licencia MIT. 
diff --git a/dockerfile b/dockerfile new file mode 100644 index 00000000..8990ac6c --- /dev/null +++ b/dockerfile @@ -0,0 +1,26 @@ +# Imagen base con Node 22 +FROM node:22-slim + + +# Directorio de trabajo dentro del contenedor +WORKDIR /usr/src/app + + +# Copiamos primero manifiestos para cachear dependencias +COPY package*.json ./ + + +# Instalamos dependencias de producción +RUN npm ci --omit=dev + + +# Copiamos el resto del código (incluye /model) +COPY . . + + +# El servicio escucha en 3002 +EXPOSE 3002 + + +# Comando de arranque +CMD ["node", "server.js"] \ No newline at end of file diff --git a/model/Prediction.js b/model/Prediction.js new file mode 100644 index 00000000..da618943 --- /dev/null +++ b/model/Prediction.js @@ -0,0 +1,23 @@ +//model/prediction.js +'use strict' + +const mongoose = require('mongoose'); +const Schema = mongoose.Schema; + +const PredictionSchema = new Schema({ + source: String, + timestamp: { type: Date, default: Date.now }, + latencyMs: Number, + features: [Number], + prediction: Number, + + featureCount: Number, + scalerVersion: String, + createdAt: { type: Date, default: Date.now }, + targetDate: Date, + dailyValues: [Number], + + +}); + +module.exports = mongoose.model('Prediction', PredictionSchema); \ No newline at end of file diff --git a/package.json b/package.json index 7e709810..42a9fc1b 100644 --- a/package.json +++ b/package.json @@ -11,11 +11,14 @@ }, "author": "Iren Lorenzo Fonseca", "license": "ISC", - "bugs": { - }, + "bugs": {}, "dependencies": { "@tensorflow/tfjs": "^4.22.0", "@tensorflow/tfjs-backend-wasm": "^4.22.0", - "express": "^5.1.0" + "dotenv": "^17.2.3", + "express": "^5.1.0", + "mongodb": "^7.0.0", + "mongoose": "^9.0.1" } -} \ No newline at end of file +} + diff --git a/server.js b/server.js index 4d44675f..f853a215 100644 --- a/server.js +++ b/server.js @@ -1,32 +1,47 @@ // server.js // Entry point del servicio PREDICT +require("dotenv").config(); const express = require("express"); +const app = 
express(); const path = require("path"); +const mongoose = require("mongoose"); const predictRoutes = require("./routes/predictRoutes"); const { initModel } = require("./services/tfModelService"); const PORT = process.env.PORT || 3002; -const app = express(); + app.use(express.json()); + +// conectar a Mongo +mongoose + .connect(process.env.MONGO_URI) + .then(() => console.log("MongoDB conectado (PREDICT)")) + .catch((err) => { + console.error("Error al conectar MongoDB:", err); + process.exit(1); + }); + // Servir la carpeta del modelo TFJS (model/model.json + pesos) const modelDir = path.resolve(__dirname, "model"); app.use("/model", express.static(modelDir)); -// Rutas del servicio PREDICT + app.use("/", predictRoutes); -// Arranque del servidor + carga del modelo + app.listen(PORT, async () => { const serverUrl = `http://localhost:${PORT}`; - console.log(`[PREDICT] Servicio escuchando en ${serverUrl}`); + console.log(`PREDICT escuchando en ${serverUrl}`); try { + // Inicializa el modelo predictivo await initModel(serverUrl); + console.log(" Modelo predictivo cargado correctamente."); } catch (err) { - console.error("Error al inicializar modelo:", err); + console.error("Error al inicializar el modelo predictivo:", err); process.exit(1); } }); diff --git a/services/tfModelService.js b/services/tfModelService.js index aff8b787..01af4168 100644 --- a/services/tfModelService.js +++ b/services/tfModelService.js @@ -34,10 +34,7 @@ function wasmFileDirUrl() { return pathToFileURL(distFsPath + path.sep).href; } -/** - * Inicializa backend WASM y carga el GraphModel - * serverUrl: ej. 
http://localhost:3002 - */ + async function initModel(serverUrl) { const wasmPath = wasmFileDirUrl(); wasmBackend.setWasmPaths(wasmPath); @@ -66,7 +63,6 @@ async function initModel(serverUrl) { throw new Error("No se ha podido detectar inputName/outputName/inputDim"); } - // Warm-up const Xwarm = tf.zeros([1, inputDim], "float32"); let out; if (typeof model.executeAsync === "function") { @@ -83,10 +79,7 @@ async function initModel(serverUrl) { console.log("[TF] Modelo listo."); } -/** - * Ejecuta el modelo con un vector de features - * Devuelve un escalar >= 0 - */ + async function predict(features) { if (!ready || !model) { throw new Error("Model not ready");