diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..2fcfa5e --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,40 @@ +name: Publish Docker image + +on: + push: + branches: [ + "release/sk-learn0.20", + "release/sk-learn0.22", + "release/sk-learn0.23", + "release/sk-learn0.24", + "release/sk-learn1", + ] + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Log in to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: upcintua/legacy-generic-python + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.idea/workspace.xml b/.idea/workspace.xml index 06cb252..dec0cee 100644 --- a/.idea/workspace.xml +++ b/.idea/workspace.xml @@ -353,4 +353,4 @@ - \ No newline at end of file + diff --git a/Dockerfile b/Dockerfile index 832592e..67f9d01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,18 @@ FROM python:3.7 RUN pip install --upgrade pip -RUN pip install fastapi uvicorn +RUN pip install tornado==4.2 RUN pip install numpy -RUN pip install scikit-learn==0.20.0 -# RUN pip install xgboost==1.3.3 +RUN pip install scikit-learn==0.20.4 RUN pip install pandas +RUN pip install xgboost +# Expose the ports we're interested in EXPOSE 8002 -COPY ./source /app/source -COPY application.py /app/application.py -CMD ["python", "/app/application.py"] -# CMD ["uvicorn", "/app:app.py", "--host", "0.0.0.0", "--port", "8002"] \ No newline at end of file +ADD source /generic-python/source +ADD application.py /generic-python/application.py + 
+CMD ["python","/generic-python/application.py"] + diff --git a/README.md b/README.md index 7fe9c66..e946dd1 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,5 @@ # generic-python +The old python inference repository that is not used anymore. + +Some images are built in this repository to support old models diff --git a/source/handlers/biclustering.py b/source/handlers/biclustering.py index 83e5370..3c7b2cc 100644 --- a/source/handlers/biclustering.py +++ b/source/handlers/biclustering.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq -# from ..helpers import doa_calc -# import numpy as np -# -# -# class BiclusteringModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# 
fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq +from ..helpers import doa_calc +import numpy as np + + +class BiclusteringModelHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if 
np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/clustering.py b/source/handlers/clustering.py index 4701053..7d73bb6 100644 --- a/source/handlers/clustering.py +++ b/source/handlers/clustering.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# -# import numpy as np -# -# -# class ClusteringModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: 
-# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq, doa_calc + +import numpy as np + + +class ClusteringModelHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is 
not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/ensemble.py b/source/handlers/ensemble.py index f896e9a..5c7362f 100644 --- a/source/handlers/ensemble.py +++ b/source/handlers/ensemble.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# -# import numpy as np -# -# -# class EnsembleModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = 
[] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq, doa_calc + +import numpy as np + + +class EnsembleModelHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if 
np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/nearestneighbours.py b/source/handlers/nearestneighbours.py index e6f1d81..0007889 100644 --- a/source/handlers/nearestneighbours.py +++ b/source/handlers/nearestneighbours.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# -# import numpy as np -# -# -# class NearestNeighboursModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: 
-# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq, doa_calc + +import numpy as np + + +class NearestNeighboursModelHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + 
predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/neuralnetwork.py b/source/handlers/neuralnetwork.py index cf80f17..1b55950 100644 --- a/source/handlers/neuralnetwork.py +++ b/source/handlers/neuralnetwork.py @@ -1,86 +1,87 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq -# -# import numpy as np -# -# -# class NeuralNetworkModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = 
json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) -# # predictions = model.predict(dataEntryAll) -# # preds = [] -# # j = 0 -# # for i in list(predFeatures.values()): -# # for pred in predictions: -# # if np.issubdtype(type(predictions[j]), int): -# # fPred = {i: int(predictions[j])} -# # preds.append(fPred) -# # if np.issubdtype(type(predictions[j]), float): -# # fPred = {i: float(predictions[j])} -# # preds.append(fPred) -# # if np.issubdtype(type(predictions[j]), str): -# # fPred = {i: predictions[j]} -# # preds.append(fPred) -# # j += 1 -# # # if np.int64 == np.dtype(predictions[j]).type: -# # # fPred = {i: int(predictions[j])} -# # # preds.append(fPred) -# # # if np.float64 == np.dtype(predictions[j]).type: -# # # fPred = {i: float(predictions[j])} -# # # preds.append(fPred) -# # # if np.str_ == np.dtype(str).type: -# # # fPred = {i: predictions[j]} -# # # preds.append(fPred) -# # # j += 1 -# # finalAll = {"predictions": preds} -# # self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import 
IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq + +import numpy as np + + +class NeuralNetworkModelHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) + # predictions = model.predict(dataEntryAll) + # preds = [] + # j = 0 + # for i in list(predFeatures.values()): + # for pred in predictions: + # if 
np.issubdtype(type(predictions[j]), int): + # fPred = {i: int(predictions[j])} + # preds.append(fPred) + # if np.issubdtype(type(predictions[j]), float): + # fPred = {i: float(predictions[j])} + # preds.append(fPred) + # if np.issubdtype(type(predictions[j]), str): + # fPred = {i: predictions[j]} + # preds.append(fPred) + # j += 1 + # # if np.int64 == np.dtype(predictions[j]).type: + # # fPred = {i: int(predictions[j])} + # # preds.append(fPred) + # # if np.float64 == np.dtype(predictions[j]).type: + # # fPred = {i: float(predictions[j])} + # # preds.append(fPred) + # # if np.str_ == np.dtype(str).type: + # # fPred = {i: predictions[j]} + # # preds.append(fPred) + # # j += 1 + # finalAll = {"predictions": preds} + # self.write(json_encode(finalAll)) diff --git a/source/handlers/pipeline.py b/source/handlers/pipeline.py index 5d982f1..f4a143e 100644 --- a/source/handlers/pipeline.py +++ b/source/handlers/pipeline.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# -# import numpy as np -# -# -# class PipelineHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if 
type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) +from tornado import httpserver +from tornado import gen +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq, doa_calc + +import numpy as np + + +class PipelineHandler(tornado.web.RequestHandler): + # @tornado.asynchronous + # @gen.engine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = 
doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/regression.py b/source/handlers/regression.py index d372775..9c097ef 100644 --- a/source/handlers/regression.py +++ b/source/handlers/regression.py @@ -1,63 +1,64 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.gen import Task -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# import asyncio -# -# import numpy as np -# -# -# class LinearModelHandler(tornado.web.RequestHandler): -# # @asynchronous -# # @gen.engine -# # @gen.coroutine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'] -# , json_request['additionalInfo']) -# predFeatures = pred_request.additionalInfo['predictedFeatures'] -# rawModel = pred_request.rawModel[0] -# model = model_decoder.decode(rawModel) -# 
dataEntryAll = json_to_predreq.decode(self.request) -# doaM = [] -# try: -# doaM = json_request['doaMatrix'] -# except KeyError: -# pass -# a = None -# if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: -# doaMnp = np.asarray(doaM) -# a = doa_calc.calc_doa(doaMnp, dataEntryAll) -# predictions = model.predict(dataEntryAll) -# preds = [] -# j = 0 -# for i in list(predFeatures.values()): -# for pred in predictions: -# if np.issubdtype(type(predictions[j]), int): -# fPred = {i: int(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), float): -# fPred = {i: float(predictions[j])} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# if np.issubdtype(type(predictions[j]), str): -# fPred = {i: predictions[j]} -# if a is not None: -# for key, value in a[j].items(): -# fPred[key] = value -# preds.append(fPred) -# j += 1 -# finalAll = {"predictions": preds} -# self.write(json_encode(finalAll)) \ No newline at end of file +from tornado import httpserver +from tornado import gen +from tornado.gen import Task +from tornado.ioloop import IOLoop +import tornado.web +from tornado.escape import json_decode, json_encode +from ..entities.prediction_request import PredictionRequest +from ..entities.dataset import Dataset +from ..entities.dataentry import DataEntry +from ..helpers import model_decoder, json_to_predreq, doa_calc +import asyncio + +import numpy as np + + +class LinearModelHandler(tornado.web.RequestHandler): + # @asynchronous + # @gen.engine + # @gen.coroutine + def post(self): + # print(self.request.body) + json_request = json_decode(self.request.body) + pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'] + , json_request['additionalInfo']) + predFeatures = pred_request.additionalInfo['predictedFeatures'] + rawModel = pred_request.rawModel[0] + model = 
model_decoder.decode(rawModel) + dataEntryAll = json_to_predreq.decode(self.request) + doaM = [] + try: + doaM = json_request['doaMatrix'] + except KeyError: + pass + a = None + if type(doaM).__name__ != 'NoneType' and len(doaM) > 0: + doaMnp = np.asarray(doaM) + a = doa_calc.calc_doa(doaMnp, dataEntryAll) + predictions = model.predict(dataEntryAll) + preds = [] + j = 0 + for i in list(predFeatures.values()): + for pred in predictions: + if np.issubdtype(type(predictions[j]), int): + fPred = {i: int(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), float): + fPred = {i: float(predictions[j])} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + if np.issubdtype(type(predictions[j]), str): + fPred = {i: predictions[j]} + if a is not None: + for key, value in a[j].items(): + fPred[key] = value + preds.append(fPred) + j += 1 + finalAll = {"predictions": preds} + self.set_header("Content-Type", "application/json") + self.write(json_encode(finalAll)) diff --git a/source/handlers/svm.py b/source/handlers/svm.py index 9b00f1d..59d2937 100644 --- a/source/handlers/svm.py +++ b/source/handlers/svm.py @@ -1,59 +1,60 @@ -# from tornado import httpserver -# from tornado import gen -# from tornado.ioloop import IOLoop -# import tornado.web -# from tornado.escape import json_decode, json_encode -# from ..entities.prediction_request import PredictionRequest -# from ..entities.dataset import Dataset -# from ..entities.dataentry import DataEntry -# from ..helpers import model_decoder, json_to_predreq, doa_calc -# -# import numpy as np -# -# -# class SvmModelHandler(tornado.web.RequestHandler): -# # @tornado.asynchronous -# # @gen.engine -# def post(self): -# # print(self.request.body) -# json_request = json_decode(self.request.body) -# pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], 
class SvmModelHandler(tornado.web.RequestHandler):
    """Tornado handler that decodes a serialized SVM model and predicts.

    Same request/response contract as the other model handlers: JSON body
    with 'dataset', 'rawModel', 'additionalInfo' and an optional
    'doaMatrix'; responds with ``{"predictions": [...]}``.
    """

    def post(self):
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(json_request['dataset'],
                                         json_request['rawModel'],
                                         json_request['additionalInfo'])
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        # rawModel arrives as a one-element list holding the encoded model.
        model = model_decoder.decode(pred_request.rawModel[0])
        dataEntryAll = json_to_predreq.decode(self.request)

        # DOA matrix is optional; get() covers both a missing key and null.
        doaM = json_request.get('doaMatrix')
        a = None
        if doaM:
            a = doa_calc.calc_doa(np.asarray(doaM), dataEntryAll)

        predictions = model.predict(dataEntryAll)
        preds = []
        for feature_name in predFeatures.values():
            # BUG FIX: the original shared one running index across the
            # feature loop, which over-ran `predictions` with more than one
            # predicted feature. Enumerate per feature instead.
            for j, prediction in enumerate(predictions):
                if np.issubdtype(type(prediction), np.integer):
                    value = int(prediction)
                elif np.issubdtype(type(prediction), np.floating):
                    value = float(prediction)
                else:
                    value = prediction  # e.g. string class labels pass through
                fPred = {feature_name: value}
                if a is not None:
                    fPred.update(a[j])  # merge per-entry DOA scores
                preds.append(fPred)

        self.set_header("Content-Type", "application/json")
        self.write(json_encode({"predictions": preds}))
class TreeModelHandler(tornado.web.RequestHandler):
    """Tornado handler that decodes a serialized tree model and predicts.

    Same request/response contract as the other model handlers: JSON body
    with 'dataset', 'rawModel', 'additionalInfo' and an optional
    'doaMatrix'; responds with ``{"predictions": [...]}``.
    """

    def post(self):
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(json_request['dataset'],
                                         json_request['rawModel'],
                                         json_request['additionalInfo'])
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        # rawModel arrives as a one-element list holding the encoded model.
        model = model_decoder.decode(pred_request.rawModel[0])
        dataEntryAll = json_to_predreq.decode(self.request)

        # DOA matrix is optional; get() covers both a missing key and null.
        doaM = json_request.get('doaMatrix')
        a = None
        if doaM:
            a = doa_calc.calc_doa(np.asarray(doaM), dataEntryAll)

        predictions = model.predict(dataEntryAll)
        preds = []
        for feature_name in predFeatures.values():
            # BUG FIX: the original shared one running index across the
            # feature loop, which over-ran `predictions` with more than one
            # predicted feature. Enumerate per feature instead.
            for j, prediction in enumerate(predictions):
                if np.issubdtype(type(prediction), np.integer):
                    value = int(prediction)
                elif np.issubdtype(type(prediction), np.floating):
                    value = float(prediction)
                else:
                    value = prediction  # e.g. string class labels pass through
                fPred = {feature_name: value}
                if a is not None:
                    fPred.update(a[j])  # merge per-entry DOA scores
                preds.append(fPred)

        self.set_header("Content-Type", "application/json")
        self.write(json_encode({"predictions": preds}))
class XGBoostHandler(tornado.web.RequestHandler):
    """Tornado handler that decodes a serialized XGBoost model and predicts.

    Same request/response contract as the other model handlers, except the
    input matrix is rebuilt as a pandas DataFrame because XGBoost models
    trained from a DataFrame validate column names at predict time.
    """

    def post(self):
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(json_request['dataset'],
                                         json_request['rawModel'],
                                         json_request['additionalInfo'])
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        # rawModel arrives as a one-element list holding the encoded model.
        model = model_decoder.decode(pred_request.rawModel[0])
        dataEntryAll = json_to_predreq.decode(self.request)

        # DOA matrix is optional; get() covers both a missing key and null.
        doaM = json_request.get('doaMatrix')
        a = None
        if doaM:
            a = doa_calc.calc_doa(np.asarray(doaM), dataEntryAll)

        # Column order must match the caller-declared input series so the
        # booster's feature-name check passes.
        data = pd.DataFrame(
            data=np.array(dataEntryAll),
            columns=pred_request.additionalInfo['fromUser']['inputSeries'])
        predictions = model.predict(data).tolist()

        preds = []
        for feature_name in predFeatures.values():
            # BUG FIX: the original shared one running index across the
            # feature loop, which over-ran `predictions` with more than one
            # predicted feature. Enumerate per feature instead.
            for j, prediction in enumerate(predictions):
                if np.issubdtype(type(prediction), np.integer):
                    value = int(prediction)
                elif np.issubdtype(type(prediction), np.floating):
                    value = float(prediction)
                else:
                    value = prediction  # e.g. string class labels pass through
                fPred = {feature_name: value}
                if a is not None:
                    fPred.update(a[j])  # merge per-entry DOA scores
                preds.append(fPred)

        self.set_header("Content-Type", "application/json")
        self.write(json_encode({"predictions": preds}))
def decode(request):
    """Extract the model-input matrix from a prediction request.

    Resolves the caller-declared ``inputSeries`` feature names to dataset
    feature keys via ``independentFeatures``, then pulls those values out
    of the dataset's data entries.

    Returns a list of rows (one per data entry), each a list of values
    ordered like ``inputSeries``. Values missing from an entry are
    silently skipped, matching the previous best-effort behaviour.
    """
    json_request = json_decode(request.body)
    pred_request = PredictionRequest(json_request['dataset'],
                                     json_request['rawModel'],
                                     json_request['additionalInfo'])
    input_series = pred_request.additionalInfo['fromUser']['inputSeries']
    independentFeatures = pred_request.additionalInfo['independentFeatures']

    # Map each requested input name to its dataset feature key, preserving
    # the inputSeries ordering.
    shorted = []
    for actual in input_series:
        for key in independentFeatures:
            if actual == independentFeatures[key]:
                for feature in pred_request.dataset['features']:
                    if feature['name'] == actual:
                        shorted.append(feature['key'])

    # 'dataEntry' may arrive either as a single {'values': {...}} mapping
    # or as a list of such mappings (the shape the original loop-based
    # implementation handled); normalise to a list so both shapes work.
    dataEntry = pred_request.dataset['dataEntry']
    entries = dataEntry if isinstance(dataEntry, list) else [dataEntry]

    dataEntryAll = []
    for entry in entries:
        if not (isinstance(entry, dict) and 'values' in entry):
            # Defensive: ignore malformed entries instead of crashing.
            continue
        values = entry['values']
        row = []
        for key in shorted:
            # Keys in 'values' are stringified; coerce before lookup.
            value = values.get(str(key))
            if value is not None:
                row.append(value)
        dataEntryAll.append(row)
    return dataEntryAll