Merge remote-tracking branch 'refs/remotes/origin/main' into v1.0-test
# Conflicts:
#	.idea/workspace.xml
#	Dockerfile
#	source/handlers/biclustering.py
#	source/handlers/clustering.py
#	source/handlers/ensemble.py
#	source/handlers/nearestneighbours.py
#	source/handlers/neuralnetwork.py
#	source/handlers/pipeline.py
#	source/handlers/regression.py
#	source/handlers/svm.py
#	source/handlers/tree.py
#	source/handlers/xgboost.py
#	source/helpers/json_to_predreq.py
alarv committed Jul 26, 2024
2 parents 7698f63 + d3019a0 commit 026306f
Showing 15 changed files with 731 additions and 676 deletions.
40 changes: 40 additions & 0 deletions .github/workflows/publish.yml
@@ -0,0 +1,40 @@
name: Publish Docker image

on:
  push:
    branches: [
        "release/sk-learn0.20",
        "release/sk-learn0.22",
        "release/sk-learn0.23",
        "release/sk-learn0.24",
        "release/sk-learn1",
      ]

jobs:
  push_to_registry:
    name: Push Docker image to Docker Hub
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: upcintua/legacy-generic-python

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
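
Since the workflow passes no explicit tags input to docker/metadata-action, the action falls back to its default rules, which include tagging the image after the pushed branch with "/" sanitized to "-". A small illustrative sketch of that rule in Python (the ref below is an example, not taken from this commit):

# Illustrative only: how docker/metadata-action's default branch rule
# ("type=ref,event=branch") turns a pushed ref into an image tag.
ref = "refs/heads/release/sk-learn1"        # example push ref
branch = ref[len("refs/heads/"):]           # "release/sk-learn1"
tag = branch.replace("/", "-")              # "release-sk-learn1"
print(f"upcintua/legacy-generic-python:{tag}")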
2 changes: 1 addition & 1 deletion .idea/workspace.xml

Some generated files are not rendered by default.

16 changes: 9 additions & 7 deletions Dockerfile
@@ -1,16 +1,18 @@
FROM python:3.7

RUN pip install --upgrade pip
RUN pip install fastapi uvicorn
RUN pip install tornado==4.2
RUN pip install numpy
RUN pip install scikit-learn==0.20.0
# RUN pip install xgboost==1.3.3
RUN pip install scikit-learn==0.20.4
RUN pip install pandas
RUN pip install xgboost

# Expose the ports we're interested in
EXPOSE 8002

COPY ./source /app/source
COPY application.py /app/application.py

CMD ["python", "/app/application.py"]
# CMD ["uvicorn", "/app:app.py", "--host", "0.0.0.0", "--port", "8002"]
ADD source /generic-python/source
ADD application.py /generic-python/application.py

CMD ["python","/generic-python/application.py"]

3 changes: 3 additions & 0 deletions README.md
@@ -1,2 +1,5 @@
# generic-python

The old Python inference repository that is no longer used.

Some images are built in this repository to support old models.
119 changes: 60 additions & 59 deletions source/handlers/biclustering.py
@@ -1,59 +1,60 @@
# from tornado import httpserver
# from tornado import gen
# from tornado.ioloop import IOLoop
# import tornado.web
# from tornado.escape import json_decode, json_encode
# from ..entities.prediction_request import PredictionRequest
# from ..entities.dataset import Dataset
# from ..entities.dataentry import DataEntry
# from ..helpers import model_decoder, json_to_predreq
# from ..helpers import doa_calc
# import numpy as np
#
#
# class BiclusteringModelHandler(tornado.web.RequestHandler):
#     # @tornado.asynchronous
#     # @gen.engine
#     def post(self):
#         # print(self.request.body)
#         json_request = json_decode(self.request.body)
#         pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo'])
#         predFeatures = pred_request.additionalInfo['predictedFeatures']
#         rawModel = pred_request.rawModel[0]
#         model = model_decoder.decode(rawModel)
#         dataEntryAll = json_to_predreq.decode(self.request)
#         doaM = []
#         try:
#             doaM = json_request['doaMatrix']
#         except KeyError:
#             pass
#         a = None
#         if type(doaM).__name__ != 'NoneType' and len(doaM) > 0:
#             doaMnp = np.asarray(doaM)
#             a = doa_calc.calc_doa(doaMnp, dataEntryAll)
#         predictions = model.predict(dataEntryAll)
#         preds = []
#         j = 0
#         for i in list(predFeatures.values()):
#             for pred in predictions:
#                 if np.issubdtype(type(predictions[j]), int):
#                     fPred = {i: int(predictions[j])}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 if np.issubdtype(type(predictions[j]), float):
#                     fPred = {i: float(predictions[j])}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 if np.issubdtype(type(predictions[j]), str):
#                     fPred = {i: predictions[j]}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 j += 1
#         finalAll = {"predictions": preds}
#         self.write(json_encode(finalAll))
from tornado import httpserver
from tornado import gen
from tornado.ioloop import IOLoop
import tornado.web
from tornado.escape import json_decode, json_encode
from ..entities.prediction_request import PredictionRequest
from ..entities.dataset import Dataset
from ..entities.dataentry import DataEntry
from ..helpers import model_decoder, json_to_predreq
from ..helpers import doa_calc
import numpy as np


class BiclusteringModelHandler(tornado.web.RequestHandler):
    # @tornado.asynchronous
    # @gen.engine
    def post(self):
        # print(self.request.body)
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo'])
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        rawModel = pred_request.rawModel[0]
        model = model_decoder.decode(rawModel)
        dataEntryAll = json_to_predreq.decode(self.request)
        doaM = []
        try:
            doaM = json_request['doaMatrix']
        except KeyError:
            pass
        a = None
        if type(doaM).__name__ != 'NoneType' and len(doaM) > 0:
            doaMnp = np.asarray(doaM)
            a = doa_calc.calc_doa(doaMnp, dataEntryAll)
        predictions = model.predict(dataEntryAll)
        preds = []
        j = 0
        for i in list(predFeatures.values()):
            for pred in predictions:
                if np.issubdtype(type(predictions[j]), int):
                    fPred = {i: int(predictions[j])}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                if np.issubdtype(type(predictions[j]), float):
                    fPred = {i: float(predictions[j])}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                if np.issubdtype(type(predictions[j]), str):
                    fPred = {i: predictions[j]}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                j += 1
        finalAll = {"predictions": preds}
        self.set_header("Content-Type", "application/json")
        self.write(json_encode(finalAll))
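
For reference, a hedged usage sketch of calling this handler with the standard library. The payload keys mirror what post() reads above; the /biclustering path is hypothetical, since the tornado route table lives in application.py, which this diff does not show:

import json
import urllib.request

payload = {
    "dataset": {},                                   # consumed by json_to_predreq.decode
    "rawModel": ["<base64-encoded model>"],          # handler decodes rawModel[0]
    "additionalInfo": {"predictedFeatures": {"f1": "Predicted f1"}},
    "doaMatrix": [[0.1, 0.2], [0.3, 0.4]],           # optional; enables doa_calc.calc_doa
}
req = urllib.request.Request(
    "http://localhost:8002/biclustering",            # port 8002 per the Dockerfile EXPOSE
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
print(urllib.request.urlopen(req).read())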
119 changes: 60 additions & 59 deletions source/handlers/clustering.py
@@ -1,59 +1,60 @@
# from tornado import httpserver
# from tornado import gen
# from tornado.ioloop import IOLoop
# import tornado.web
# from tornado.escape import json_decode, json_encode
# from ..entities.prediction_request import PredictionRequest
# from ..entities.dataset import Dataset
# from ..entities.dataentry import DataEntry
# from ..helpers import model_decoder, json_to_predreq, doa_calc
#
# import numpy as np
#
#
# class ClusteringModelHandler(tornado.web.RequestHandler):
#     # @tornado.asynchronous
#     # @gen.engine
#     def post(self):
#         # print(self.request.body)
#         json_request = json_decode(self.request.body)
#         pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo'])
#         predFeatures = pred_request.additionalInfo['predictedFeatures']
#         rawModel = pred_request.rawModel[0]
#         model = model_decoder.decode(rawModel)
#         dataEntryAll = json_to_predreq.decode(self.request)
#         doaM = []
#         try:
#             doaM = json_request['doaMatrix']
#         except KeyError:
#             pass
#         a = None
#         if type(doaM).__name__ != 'NoneType' and len(doaM) > 0:
#             doaMnp = np.asarray(doaM)
#             a = doa_calc.calc_doa(doaMnp, dataEntryAll)
#         predictions = model.predict(dataEntryAll)
#         preds = []
#         j = 0
#         for i in list(predFeatures.values()):
#             for pred in predictions:
#                 if np.issubdtype(type(predictions[j]), int):
#                     fPred = {i: int(predictions[j])}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 if np.issubdtype(type(predictions[j]), float):
#                     fPred = {i: float(predictions[j])}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 if np.issubdtype(type(predictions[j]), str):
#                     fPred = {i: predictions[j]}
#                     if a is not None:
#                         for key, value in a[j].items():
#                             fPred[key] = value
#                     preds.append(fPred)
#                 j += 1
#         finalAll = {"predictions": preds}
#         self.write(json_encode(finalAll))
from tornado import httpserver
from tornado import gen
from tornado.ioloop import IOLoop
import tornado.web
from tornado.escape import json_decode, json_encode
from ..entities.prediction_request import PredictionRequest
from ..entities.dataset import Dataset
from ..entities.dataentry import DataEntry
from ..helpers import model_decoder, json_to_predreq, doa_calc

import numpy as np


class ClusteringModelHandler(tornado.web.RequestHandler):
    # @tornado.asynchronous
    # @gen.engine
    def post(self):
        # print(self.request.body)
        json_request = json_decode(self.request.body)
        pred_request = PredictionRequest(json_request['dataset'], json_request['rawModel'], json_request['additionalInfo'])
        predFeatures = pred_request.additionalInfo['predictedFeatures']
        rawModel = pred_request.rawModel[0]
        model = model_decoder.decode(rawModel)
        dataEntryAll = json_to_predreq.decode(self.request)
        doaM = []
        try:
            doaM = json_request['doaMatrix']
        except KeyError:
            pass
        a = None
        if type(doaM).__name__ != 'NoneType' and len(doaM) > 0:
            doaMnp = np.asarray(doaM)
            a = doa_calc.calc_doa(doaMnp, dataEntryAll)
        predictions = model.predict(dataEntryAll)
        preds = []
        j = 0
        for i in list(predFeatures.values()):
            for pred in predictions:
                if np.issubdtype(type(predictions[j]), int):
                    fPred = {i: int(predictions[j])}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                if np.issubdtype(type(predictions[j]), float):
                    fPred = {i: float(predictions[j])}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                if np.issubdtype(type(predictions[j]), str):
                    fPred = {i: predictions[j]}
                    if a is not None:
                        for key, value in a[j].items():
                            fPred[key] = value
                    preds.append(fPred)
                j += 1
        finalAll = {"predictions": preds}
        self.set_header("Content-Type", "application/json")
        self.write(json_encode(finalAll))
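
The doa_calc helper is also absent from this diff. As one speculative sketch, domain-of-applicability checks in QSAR-style services are often leverage-based: h = x (XᵀX)⁻¹ xᵀ per query row, flagged against the conventional cutoff h* = 3p/n. The list-of-dicts return shape below matches how the handler merges a[j].items() into each prediction; the key names are invented for illustration:

import numpy as np

def calc_doa(doa_matrix, queries):
    # Leverage-based sketch: h = x (X^T X)^-1 x^T for each query row.
    X = np.asarray(doa_matrix, dtype=float)
    xtx_inv = np.linalg.pinv(X.T @ X)
    n, p = X.shape
    threshold = 3.0 * p / n                  # conventional cutoff h* = 3p/n
    results = []
    for x in np.atleast_2d(np.asarray(queries, dtype=float)):
        leverage = float(x @ xtx_inv @ x)
        results.append({"doaValue": leverage, "inDomain": leverage <= threshold})
    return results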