Update docs #60

Merged · 3 commits · Oct 11, 2023

8 changes: 5 additions & 3 deletions .github/workflows/e2e-tests.yml
@@ -21,9 +21,9 @@ jobs:
echo API_NAME='github-testing-process-api' >> .env
echo STORAGE_SERVICE='minio' >> .env
echo STORAGE_BUCKET='api-storage' >> .env
- echo STORAGE_METADATA_DIR='metadata' >> .env
- echo STORAGE_RESULTS_DIR='results' >> .env
- echo STORAGE_LOGS_DIR='logs' >> .env
+ echo STORAGE_METADATA_PREFIX='metadata' >> .env
+ echo STORAGE_RESULTS_PREFIX='results' >> .env
+ echo STORAGE_LOGS_PREFIX='logs' >> .env

echo LOCAL_LOGS_DIR='../.data/logs' >> .env

@@ -33,6 +33,8 @@ jobs:
echo MINIO_S3_REGION='us-east-1' >> .env
echo MINIO_S3_DISABLE_SSL=true >> .env
echo MINIO_S3_FORCE_PATH_STYLE=true >> .env
+ echo MINIO_ROOT_USER=user >> .env
+ echo MINIO_ROOT_PASSWORD=password >> .env

echo AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} >> .env
echo AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} >> .env
3 changes: 3 additions & 0 deletions .gitignore
@@ -20,3 +20,6 @@
.data/*
*main
settings.json


+ .miniodata/
41 changes: 30 additions & 11 deletions api/docs/docs.go

Some generated files are not rendered by default.

38 changes: 28 additions & 10 deletions api/docs/swagger.json
@@ -266,7 +266,7 @@
"post": {
"description": "[Execute Process Specification](https://docs.ogc.org/is/18-062r2/18-062r2.html#sc_create_job)",
"consumes": [
"*/*"
"application/json"
],
"produces": [
"application/json"
@@ -284,8 +284,8 @@
"required": true
},
{
"description": "example: {text”:“Hello World!”}",
"name": "input",
"description": "example: {inputs: {text:Hello World!}} (add double quotes for all strings in the payload)",
"name": "inputs",
"in": "body",
"required": true,
"schema": {
@@ -333,23 +333,23 @@
"jobs.JobLogs": {
"type": "object",
"properties": {
"api_logs": {
"type": "array",
"items": {
"type": "string"
}
},
"container_logs": {
"type": "array",
"items": {
"type": "string"
"$ref": "#/definitions/jobs.LogEntry"
}
},
"jobID": {
"type": "string"
},
"processID": {
"type": "string"
},
"server_logs": {
"type": "array",
"items": {
"$ref": "#/definitions/jobs.LogEntry"
}
}
}
},
@@ -380,6 +380,20 @@
}
}
},
"jobs.LogEntry": {
"type": "object",
"properties": {
"level": {
"type": "string"
},
"msg": {
"type": "string"
},
"time": {
"type": "string"
}
}
},
"processes.Info": {
"type": "object",
"properties": {
@@ -557,5 +571,9 @@
}
}
}
},
"externalDocs": {
"description": "Schemas",
"url": "http://schemas.opengis.net/ogcapi/processes/part1/1.0/openapi/schemas/"
}
}
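
For reference, the updated execution endpoint now takes a JSON body with an `inputs` object instead of an arbitrary `*/*` payload. Below is a minimal request sketch in Go, assuming the API is running locally on the host listed in the spec (localhost:5050) and that `pyecho` is a registered process; it is not code from this PR.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// The body follows the newly documented shape: {"inputs": {"text": "Hello World!"}}.
	// The quoted keys and values are what the parameter description means by
	// "add double quotes for all strings in the payload".
	body := []byte(`{"inputs": {"text": "Hello World!"}}`)

	resp, err := http.Post(
		"http://localhost:5050/processes/pyecho/execution",
		"application/json", // matches the new "consumes" value
		bytes.NewReader(body),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
```
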
29 changes: 21 additions & 8 deletions api/docs/swagger.yaml
@@ -19,18 +19,18 @@ definitions:
type: object
jobs.JobLogs:
properties:
- api_logs:
- items:
- type: string
- type: array
container_logs:
items:
- type: string
+ $ref: '#/definitions/jobs.LogEntry'
type: array
jobID:
type: string
processID:
type: string
+ server_logs:
+ items:
+ $ref: '#/definitions/jobs.LogEntry'
+ type: array
type: object
jobs.JobRecord:
properties:
@@ -50,6 +50,15 @@ definitions:
updated:
type: string
type: object
+ jobs.LogEntry:
+ properties:
+ level:
+ type: string
+ msg:
+ type: string
+ time:
+ type: string
+ type: object
processes.Info:
properties:
description:
@@ -165,6 +174,9 @@ definitions:
$ref: '#/definitions/processes.Outputs'
type: array
type: object
+ externalDocs:
+ description: Schemas
+ url: http://schemas.opengis.net/ogcapi/processes/part1/1.0/openapi/schemas/
host: localhost:5050
info:
contact:
@@ -340,17 +352,18 @@ paths:
/processes/{processID}/execution:
post:
consumes:
- - '*/*'
+ - application/json
description: '[Execute Process Specification](https://docs.ogc.org/is/18-062r2/18-062r2.html#sc_create_job)'
parameters:
- description: pyecho
in: path
name: processID
required: true
type: string
- - description: 'example: {“text”:“Hello World!”}'
+ - description: 'example: {inputs: {text:Hello World!}} (add double quotes for
+ all strings in the payload)'
in: body
- name: input
+ name: inputs
required: true
schema:
type: string
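
The new `jobs.LogEntry` definition replaces the plain string log arrays, and `api_logs` is dropped in favor of `server_logs`. Below is a rough sketch of the Go shapes these schema entries describe; field names and tags are inferred from the swagger definitions above, so the actual structs in `api/jobs` may differ.

```go
package jobs

// LogEntry mirrors the new jobs.LogEntry schema: a structured log line
// with a level, message, and timestamp, all serialized as strings.
type LogEntry struct {
	Level string `json:"level"`
	Msg   string `json:"msg"`
	Time  string `json:"time"`
}

// JobLogs mirrors the updated jobs.JobLogs schema: the api_logs string
// array is gone, and container/server logs are arrays of LogEntry.
type JobLogs struct {
	JobID         string     `json:"jobID"`
	ProcessID     string     `json:"processID"`
	ContainerLogs []LogEntry `json:"container_logs"`
	ServerLogs    []LogEntry `json:"server_logs"`
}
```
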
4 changes: 2 additions & 2 deletions api/handlers/handlers.go
@@ -241,10 +241,10 @@ func (rh *RESTHandler) ProcessDescribeHandler(c echo.Context) error {
// @Summary Execute Process
// @Description [Execute Process Specification](https://docs.ogc.org/is/18-062r2/18-062r2.html#sc_create_job)
// @Tags processes
- // @Accept */*
+ // @Accept json
// @Produce json
// @Param processID path string true "pyecho"
- // @Param input body string true "example: {text”:“Hello World!”}"
+ // @Param inputs body string true "example: {inputs: {text:Hello World!}} (add double quotes for all strings in the payload)"
// @Success 200 {object} jobResponse
// @Router /processes/{processID}/execution [post]
// Does not produce HTML
4 changes: 2 additions & 2 deletions api/jobs/aws_batch_jobs.go
@@ -405,7 +405,7 @@ func (j *AWSBatchJob) WriteMetaData() {
return
}

- metadataDir := os.Getenv("STORAGE_METADATA_DIR")
+ metadataDir := os.Getenv("STORAGE_METADATA_PREFIX")
mdLocation := fmt.Sprintf("%s/%s.json", metadataDir, j.UUID)
// TODO: Determine if batch metadata should be put on aws...currently this is the case
utils.WriteToS3(j.StorageSvc, jsonBytes, mdLocation, "application/json", 0)
@@ -415,7 +415,7 @@ func (j *AWSBatchJob) WriteMetaData() {
// j.logger.Info("Starting results writing routine.")
// defer j.logger.Info("Finished results writing routine.")

- // resultsDir := os.Getenv("STORAGE_RESULTS_DIR")
+ // resultsDir := os.Getenv("STORAGE_RESULTS_PREFIX")
// resultsLocation := fmt.Sprintf("%s/%s.json", resultsDir, j.UUID)
// err = utils.WriteToS3(j.StorageSvc, data, resultsLocation, "application/json", 0)
// if err != nil {
4 changes: 2 additions & 2 deletions api/jobs/docker_jobs.go
@@ -358,7 +358,7 @@ func (j *DockerJob) WriteMetaData() {
return
}

- metadataDir := os.Getenv("STORAGE_METADATA_DIR")
+ metadataDir := os.Getenv("STORAGE_METADATA_PREFIX")
mdLocation := fmt.Sprintf("%s/%s.json", metadataDir, j.UUID)
err = utils.WriteToS3(j.StorageSvc, jsonBytes, mdLocation, "application/json", 0)
if err != nil {
@@ -370,7 +370,7 @@ func (j *DockerJob) WriteMetaData() {
// j.logger.Info("Starting results writing routine.")
// defer j.logger.Info("Finished results writing routine.")

- // resultsDir := os.Getenv("STORAGE_RESULTS_DIR")
+ // resultsDir := os.Getenv("STORAGE_RESULTS_PREFIX")
// resultsLocation := fmt.Sprintf("%s/%s.json", resultsDir, j.UUID)
// fmt.Println(resultsLocation)
// err = utils.WriteToS3(j.StorageSvc, data, resultsLocation, "application/json", 0)
8 changes: 4 additions & 4 deletions api/jobs/jobs.go
@@ -192,7 +192,7 @@ func FetchResults(svc *s3.S3, jid string) (interface{}, error) {
// // If JobID exists but results file doesn't then it raises an error
// // Assumes jobID is valid
// func FetchResults(svc *s3.S3, jid string) (interface{}, error) {
- // key := fmt.Sprintf("%s/%s.json", os.Getenv("STORAGE_RESULTS_DIR"), jid)
+ // key := fmt.Sprintf("%s/%s.json", os.Getenv("STORAGE_RESULTS_PREFIX"), jid)

// exist, err := utils.KeyExists(key, svc)
// if err != nil {
@@ -214,7 +214,7 @@ func FetchResults(svc *s3.S3, jid string) (interface{}, error) {
// If JobID exists but metadata file doesn't then it raises an error
// Assumes jobID is valid
func FetchMeta(svc *s3.S3, jid string) (interface{}, error) {
- key := fmt.Sprintf("%s/%s.json", os.Getenv("STORAGE_METADATA_DIR"), jid)
+ key := fmt.Sprintf("%s/%s.json", os.Getenv("STORAGE_METADATA_PREFIX"), jid)

exist, err := utils.KeyExists(key, svc)
if err != nil {
@@ -272,7 +272,7 @@ func FetchLogs(svc *s3.S3, jid, pid string, onlyContainer bool) (JobLogs, error) {
}

// If not found locally, check storage
- storageKey := fmt.Sprintf("%s/%s.%s.jsonl", os.Getenv("STORAGE_LOGS_DIR"), jid, k.key)
+ storageKey := fmt.Sprintf("%s/%s.%s.jsonl", os.Getenv("STORAGE_LOGS_PREFIX"), jid, k.key)
exists, err := utils.KeyExists(storageKey, svc)
if err != nil {
return JobLogs{}, err
@@ -309,7 +309,7 @@ func UploadLogsToStorage(svc *s3.S3, jid, pid string) {
log.Error(err.Error())
}

- storageKey := fmt.Sprintf("%s/%s.%s.jsonl", os.Getenv("STORAGE_LOGS_DIR"), jid, k)
+ storageKey := fmt.Sprintf("%s/%s.%s.jsonl", os.Getenv("STORAGE_LOGS_PREFIX"), jid, k)
err = utils.WriteToS3(svc, bytes, storageKey, "text/plain", 0)
if err != nil {
log.Error(err.Error())
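
The renamed variables are object-key prefixes rather than local directories, which is what the key construction in this file reflects. Below is a small illustrative sketch, assuming the prefix values from the e2e workflow above plus a placeholder job ID and log kind; it only prints the key layout and is not part of the PR.

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// Assumed values, taken from the e2e workflow's .env above.
	os.Setenv("STORAGE_METADATA_PREFIX", "metadata")
	os.Setenv("STORAGE_LOGS_PREFIX", "logs")

	jid := "0000-placeholder-job-id" // placeholder UUID
	kind := "container"              // placeholder log kind; actual values come from the keys iterated in FetchLogs

	// Metadata objects land under <prefix>/<jobID>.json ...
	fmt.Printf("%s/%s.json\n", os.Getenv("STORAGE_METADATA_PREFIX"), jid)
	// ... and logs under <prefix>/<jobID>.<kind>.jsonl, as in FetchLogs/UploadLogsToStorage.
	fmt.Printf("%s/%s.%s.jsonl\n", os.Getenv("STORAGE_LOGS_PREFIX"), jid, kind)
}
```
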
11 changes: 5 additions & 6 deletions docker-compose.yml
@@ -12,20 +12,19 @@ services:
- .env
volumes:
- ./api:/app
- - ./.data:/.data
+ - ./.data/api:/.data
- /var/run/docker.sock:/var/run/docker.sock
networks:
- process_api_net
- depends_on:
- - minio
+ # depends_on:
+ # - minio

minio:
container_name: minio
image: minio/minio:RELEASE.2023-08-23T10-07-06Z.fips
- environment:
- - MINIO_ROOT_USER=user
- - MINIO_ROOT_PASSWORD=password
command: server /data --console-address ":9001"
+ env_file:
+ - .env
ports:
- '9000:9000'
- '9001:9001'
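
With the MinIO root credentials now supplied through `.env` instead of being hard-coded in the compose file, a local test client might be configured roughly as below. This is a sketch using aws-sdk-go, with the endpoint, region, and path-style flags mirroring the MINIO_S3_* values set in the e2e workflow and the ports published above; none of it is code from this PR.

```go
package main

import (
	"log"
	"os"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Credentials are read from the same .env the compose file now passes to MinIO.
	sess, err := session.NewSession(&aws.Config{
		Endpoint:         aws.String("http://localhost:9000"), // MinIO API port published above
		Region:           aws.String("us-east-1"),             // MINIO_S3_REGION
		DisableSSL:       aws.Bool(true),                      // MINIO_S3_DISABLE_SSL
		S3ForcePathStyle: aws.Bool(true),                      // MINIO_S3_FORCE_PATH_STYLE
		Credentials: credentials.NewStaticCredentials(
			os.Getenv("MINIO_ROOT_USER"),
			os.Getenv("MINIO_ROOT_PASSWORD"),
			"",
		),
	})
	if err != nil {
		log.Fatal(err)
	}

	svc := s3.New(sess)
	// List the bucket used by the e2e setup (STORAGE_BUCKET=api-storage).
	out, err := svc.ListObjectsV2(&s3.ListObjectsV2Input{Bucket: aws.String("api-storage")})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("objects in api-storage: %d", len(out.Contents))
}
```
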