diff --git a/api/.gitignore b/api/.gitignore
index 68bc17f..603ae08 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -158,3 +158,5 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
+
+**/test_*.json
\ No newline at end of file
diff --git a/api/main.py b/api/main.py
index c8492fe..604b8d5 100644
--- a/api/main.py
+++ b/api/main.py
@@ -1,31 +1,22 @@
-from fastapi import FastAPI, Request
-from fastapi.responses import RedirectResponse
+from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
import uvicorn
from src import settings
from src import database_utils, api
-database_utils.init()
+database_utils.create_all()
settings = settings.get_settings()
app = FastAPI(
title=settings.APP_NAME,
- version='0.1.0',
+ version='12.0',
description=f"{settings.APP_NAME} is a web API to manage job applications, it is written in Python using FastAPI.",
contact={
"Name": "BĂ©renger Dalle-Cort",
"Email": "berenger@42borgata.com"
- },
+ },
separate_input_output_schemas=False)
-app.include_router(api.jobApi.router)
-app.include_router(api.eventApi.router)
-app.include_router(api.databaseApi.router)
-
-@app.get("/")
-async def redirect_home_to_docs():
- return RedirectResponse(url='/docs')
-
app.add_middleware(
CORSMiddleware,
allow_origins=settings.ALLOW_ORIGINS,
@@ -34,6 +25,17 @@ async def redirect_home_to_docs():
allow_headers=settings.ALLOW_HEADERS,
)
+app.router.redirect_slashes = False # Optional
+
+app.include_router(api.jobApi.router)
+app.include_router(api.eventApi.router)
+app.include_router(api.databaseApi.router)
+
+@app.get("/")
+async def hello_world():
+ return f"Hello World, my name is {app.title} {app.version}"
+
+
""" Provide a way to run the app using `python -m main` command"""
if __name__ == "__main__":
uvicorn.run("main:app", host=settings.APP_HOST, port=settings.APP_PORT, log_level="info", reload=True)
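A quick smoke check for the reordered startup (middleware registered before the routers, docs redirect replaced by a greeting at "/") — a minimal sketch that assumes the default host/port from src/settings.py and the third-party `requests` package:

    import requests

    # Hits the new hello-world root; the server must already be running.
    resp = requests.get("http://127.0.0.1:8000/")
    resp.raise_for_status()
    print(resp.json())  # e.g. "Hello World, my name is Seeking API 12.0"
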
diff --git a/api/requirements.txt b/api/requirements.txt
index ba29eb3..6ef3b01 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -4,4 +4,5 @@ SQLAlchemy~=2.0.23
starlette~=0.27.0
pydantic~=2.5.1
postgres~=4.0
-pydantic-settings~=2.2.1
\ No newline at end of file
+pydantic-settings~=2.2.1
+python-multipart~=0.0.9
\ No newline at end of file
diff --git a/api/src/api/databaseApi.py b/api/src/api/databaseApi.py
index 21397c5..bf8b13b 100644
--- a/api/src/api/databaseApi.py
+++ b/api/src/api/databaseApi.py
@@ -1,35 +1,52 @@
-from fastapi import HTTPException, Depends
+from typing import Annotated
+from fastapi import File, HTTPException, Depends, UploadFile
+from pydantic import ValidationError
from sqlalchemy.orm import Session
from starlette import status
+from src.schemas.backup import make_backup
from src import schemas, crud, database_utils
from src.database_utils import get_db
from fastapi import APIRouter
router = APIRouter(
- prefix="/database",
+ prefix="/db",
tags=['database']
)
-@router.get("/backup/")
-async def backup_database(session: Session = Depends(get_db)) -> schemas.Page[schemas.Job]:
-
- """ Return the whole Job table, can be restored using /database/restore/ endpoint"""
+@router.get("/backup")
+async def backup(session: Session = Depends(get_db)) -> schemas.Backup_v1:
+ # Extract the list of ALL jobs
page = crud.jobCrud.get_all(session)
- return schemas.Page[schemas.Job](
- item=list(map(lambda job: schemas.Job.model_validate(job), page.item)),
- item_total_count=page.item_total_count
- )
+ jobs = list(map(lambda job: schemas.Job.model_validate(job), page.item))
+
+ # Return a backup with the list
+ return make_backup(jobs)
-@router.post("/restore/")
-async def restore_database(jobs: schemas.Page[schemas.Job], session: Session = Depends(get_db)) -> str:
+@router.post("/restore/v1")
+async def restore(
+ file: Annotated[UploadFile, File()],
+ session: Session = Depends(get_db)
+ ) -> str:
- """ Restore the entire database from a large Job list. Existing data will be erased. """
- if database_utils.restore(jobs, session) is False:
+ """ Restore the entire database from a large Job list. Existing data will be erased. """
+
+ backup: schemas.Backup_v1
+
+ try:
+ file_str = file.file.read()
+ backup = schemas.Backup_v1.model_validate_json(file_str)
+ except ValidationError as err:
raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail='Unable to restore the database')
- return "Database restored"
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=err.json())
+
+ if database_utils.erase_and_restore(backup.jobs, session) is False:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail='Unable to restore the database')
+ return f"Database restored from {file.filename}"
diff --git a/api/src/api/jobApi.py b/api/src/api/jobApi.py
index 23f8683..02747da 100644
--- a/api/src/api/jobApi.py
+++ b/api/src/api/jobApi.py
@@ -6,8 +6,8 @@
from src.database_utils import get_db
from src.exceptions import JobNotFoundException
from src.schemas import Page
-
from fastapi import APIRouter
+
router = APIRouter(
tags=['job'],
prefix="/job"
diff --git a/api/src/crud/jobCrud.py b/api/src/crud/jobCrud.py
index 494c9c4..ae88012 100644
--- a/api/src/crud/jobCrud.py
+++ b/api/src/crud/jobCrud.py
@@ -78,6 +78,6 @@ def get_page(session: Session,
# 3) Handle pagination
return paginate(query, skip, limit)
-def get_all(session):
- query = session.query(models.Job).all()
+def get_all(session: Session):
+ query = session.query(models.Job)
return paginate(query)
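get_all now hands paginate the un-executed Query instead of a materialized list, which lets pagination and counting happen in SQL rather than in Python. A sketch of the distinction, using SQLAlchemy's standard Query API:

    # A Query builds SQL lazily; .all() executes it and returns a list.
    query = session.query(models.Job)             # no SQL issued yet
    first_page = query.offset(0).limit(50).all()  # LIMIT/OFFSET applied in SQL
    total = query.count()                         # SELECT COUNT(*) in SQL
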
diff --git a/api/src/database_utils.py b/api/src/database_utils.py
index 41963d9..49d9980 100644
--- a/api/src/database_utils.py
+++ b/api/src/database_utils.py
@@ -1,25 +1,68 @@
+from fastapi import HTTPException, status
from sqlalchemy.orm import Session
from src import schemas, models
from src.database import Base, engine, SessionLocal
-def init():
+def create_all():
Base.metadata.create_all(bind=engine)
-def reset():
+def drop_all():
Base.metadata.drop_all(bind=engine)
-def restore(page: schemas.Page[schemas.Job], session: Session) -> bool:
+def erase_and_restore(jobs: list[schemas.Job], session: Session) -> bool:
-    # Drop all tables and initialize
-    reset()
-    init()
+    # Convert Jobs and Events to ORM models
+    jobs_to_insert = list()
+    events_to_insert = list()
+    for _job in jobs:
+        job = models.Job(**_job.model_dump(exclude={'events'}))  # Nested dump is failing TODO: investigate
+        jobs_to_insert.append(job)
+        for _event in _job.events:
+            event = models.Event(**_event.model_dump())
+            event.job_id = job.id
+            events_to_insert.append(event)
+
+    if len(jobs_to_insert) != len(jobs):
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Unable to convert all schemas to models"
+        )
+
+    # Try to drop existing tables
+    try:
+        drop_all()
+    except Exception:
+        session.rollback()
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Unable to drop existing tables, transaction rolled back"
+        )
+
+    # Try to create clean tables
+    try:
+        create_all()
+    except Exception:
+        session.rollback()
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Unable to create tables after dropping the existing ones, transaction rolled back"
+        )
+
+    # Add the jobs and events to restore to the db
+    try:
+        session.add_all(jobs_to_insert)
+        session.add_all(events_to_insert)
+    except Exception:
+        session.rollback()
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Unable to restore backup, transaction rolled back"
+        )
-    # Insert back
-    db_jobs: list[models.Job] = list(map(lambda j: models.Job(j), page.item))
-    session.add_all(db_jobs)
     session.commit()
     return True
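erase_and_restore deliberately converts every schema to an ORM row before dropping anything, so a conversion failure cannot leave the database empty. Calling it outside the API layer looks roughly like this — a sketch that assumes a backup file produced by /db/backup:

    from src import database_utils, schemas
    from src.database import SessionLocal

    with open("backup.json") as f:
        backup = schemas.Backup_v1.model_validate_json(f.read())

    session = SessionLocal()
    try:
        database_utils.erase_and_restore(backup.jobs, session)
    finally:
        session.close()
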
diff --git a/api/src/schemas/__init__.py b/api/src/schemas/__init__.py
index 9194002..f92976c 100644
--- a/api/src/schemas/__init__.py
+++ b/api/src/schemas/__init__.py
@@ -1,5 +1,6 @@
from .event import Event, EventCreate, EventUpdate, EventType
from .job import Job, JobCreate, JobUpdate
from .page import Page
+from .backup import Backup_v1, make_backup
JobPage = Page[Job]
diff --git a/api/src/schemas/backup.py b/api/src/schemas/backup.py
new file mode 100644
index 0000000..31647ea
--- /dev/null
+++ b/api/src/schemas/backup.py
@@ -0,0 +1,13 @@
+from datetime import datetime
+from typing import Literal
+from pydantic import BaseModel
+from .job import Job
+
+
+class Backup_v1(BaseModel):
+ version: Literal["1"] = Field()
+ date: datetime = Field()
+ jobs: list[Job] = Field()
+
+def make_backup(jobs: list[Job]) -> Backup_v1:
+ return Backup_v1(jobs=jobs, date=datetime.now(), version="1")
\ No newline at end of file
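Tagging the schema with Literal["1"] makes the version field a discriminator, so a future Backup_v2 format can be told apart at parse time. The serialized envelope looks like this — a sketch with an empty job list; the timestamp will vary:

    from src.schemas.backup import make_backup

    backup = make_backup([])
    print(backup.model_dump_json(indent=1))
    # {
    #  "version": "1",
    #  "date": "2024-05-01T12:34:56.789012",
    #  "jobs": []
    # }
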
diff --git a/api/src/settings.py b/api/src/settings.py
index fdf9278..d43044b 100644
--- a/api/src/settings.py
+++ b/api/src/settings.py
@@ -1,15 +1,14 @@
from functools import lru_cache
-from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
APP_NAME: str = "Seeking API"
- APP_HOST: str = "0.0.0.0"
+ APP_HOST: str = "127.0.0.1"
APP_PORT: int = 8000
DATABASE_NAME: str = "seeking"
DATABASE_USER: str = "postgres"
DATABASE_PASSWORD: str = "postgres"
- DATABASE_HOST: str = "0.0.0.0"
+ DATABASE_HOST: str = "127.0.0.1"
ALLOW_ORIGINS: list[str] = ["*"]
ALLOW_CREDENTIALS: bool = True
ALLOW_METHODS: list[str] = ["*"]
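Since Settings is a pydantic-settings BaseSettings, environment variables still take precedence; the move to 127.0.0.1 only changes the out-of-the-box default. A minimal sketch of the override path:

    import os
    from src.settings import Settings

    os.environ["DATABASE_HOST"] = "db"  # e.g. the compose service name
    print(Settings().DATABASE_HOST)     # "db" -- the env var beats the default
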
diff --git a/api/test_main.py b/api/test_main.py
index 948f30d..afcbd3f 100644
--- a/api/test_main.py
+++ b/api/test_main.py
@@ -1,5 +1,6 @@
from fastapi.testclient import TestClient
from urllib.parse import urlencode
+from src.schemas.backup import Backup_v1
from main import app
from src.schemas import JobPage
import logging
@@ -27,4 +28,18 @@ def test_get_page_with_date_range():
assert response.status_code == 200
page = JobPage.model_validate(response.json())
LOGGER.debug("\n{}".format(page.model_dump_json(indent=1)))
- assert page is not None
\ No newline at end of file
+ assert page is not None
+
+def test_backup():
+    response = client.get('/db/backup')
+    assert response.status_code == 200
+    backup = Backup_v1.model_validate(response.json())
+    LOGGER.debug("\n{}".format(backup.model_dump_json(indent=1)))
+    with open("./test_main_backup_tmp.json", "w") as backup_tmp:
+        backup_tmp.write(backup.model_dump_json(indent=1))
+
+def test_restore():
+    with open("./test_main_backup_tmp.json", "rb") as backup_tmp:
+        response = client.post('/db/restore/v1', files={'file': backup_tmp})
+    LOGGER.debug(response.text)
+    assert response.status_code == 200
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index a277856..b54fc48 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -88,10 +88,10 @@ services:
POSTGRES_PASSWORD: postgres
POSTGRES_DB: seeking
ports:
- - 5432
+ - 5432:5432
networks: ['backend']
healthcheck: # See https://docs.docker.com/reference/dockerfile/#healthcheck
- test: ["CMD-SHELL", "pg_isready"]
+ test: [ "CMD", "pg_isready", "-q", "-d", "seeking", "-U", "postgres" ]
start_interval: 1s
start_period: 5s
interval: 5s
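The stricter healthcheck only reports healthy once pg_isready confirms the seeking database accepts connections for the postgres user, and publishing 5432:5432 lets the host reach the same instance. When pg_isready is not installed on the host, a rough TCP-only stand-in (weaker than pg_isready, which also performs a protocol-level handshake):

    import socket

    # Probe the published port from the host; succeeds once postgres listens.
    with socket.create_connection(("127.0.0.1", 5432), timeout=2):
        print("postgres is accepting TCP connections on 5432")
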
diff --git a/webui/.env b/webui/.env
new file mode 100644
index 0000000..ee20bda
--- /dev/null
+++ b/webui/.env
@@ -0,0 +1,6 @@
+
+# !!! DO NOT COMMIT ANY PERSONAL DATA HERE !!!
+#---------- use .env.local instead ------------
+NEXT_PUBLIC_API_BASE_URL=http://127.0.0.1:8000
+#---------- use .env.local instead ------------
+# !!! DO NOT COMMIT ANY PERSONAL DATA HERE !!!
\ No newline at end of file
diff --git a/webui/app/admin/components/Report.tsx b/webui/app/admin/components/Report.tsx
new file mode 100644
index 0000000..8fca125
--- /dev/null
+++ b/webui/app/admin/components/Report.tsx
@@ -0,0 +1,64 @@
+"use client"
+import { Job } from "@/app/typings/api"
+import { Alert, Table } from "@mui/material"
+import TableBody from "@mui/material/TableBody"
+import TableCell from "@mui/material/TableCell"
+import TableContainer from "@mui/material/TableContainer"
+import TableHead from "@mui/material/TableHead"
+import TableRow from "@mui/material/TableRow"
+import moment from "moment"
+
+export type Props = {
+ jobs: Job[]
+}
+export default function Report(props: Props) {
+ const { jobs } = props;
+
+    if (jobs.length === 0)
+        return (
+            <Alert severity="info">
+                No data found for the selected period
+            </Alert>
+        )
+
+    return (
+        <TableContainer>
+            <Table>
+                <TableHead>
+                    <TableRow>
+                        <TableCell>Date</TableCell>
+                        <TableCell>Type</TableCell>
+                        <TableCell>Name</TableCell>
+                        <TableCell>Person Contacted</TableCell>
+                        <TableCell>Contact Type</TableCell>
+                        <TableCell>Contact Info</TableCell>
+                        <TableCell>Type of Work</TableCell>
+                        <TableCell>Results</TableCell>
+                    </TableRow>
+                </TableHead>
+                <TableBody>
+                    {jobs.flatMap((job) => (
+                        job.events.map((event) => (
+                            <TableRow key={event.id}>
+                                <TableCell>{moment(event.date).format("MM/DD/YYYY")}</TableCell>
+                                <TableCell>Employer</TableCell>
+                                <TableCell>{job.company}</TableCell>
+                                <TableCell>{event.contact_person}</TableCell>
+                                <TableCell>{event.contact_type}</TableCell>
+                                <TableCell>{event.contact_info}</TableCell>
+                                <TableCell>{job.role}</TableCell>
+                                <TableCell>{event.result}</TableCell>
+                            </TableRow>
+                        ))
+                    ))}
+                </TableBody>
+            </Table>
+        </TableContainer>
+    )
+}
\ No newline at end of file
diff --git a/webui/app/admin/layout.tsx b/webui/app/admin/layout.tsx
new file mode 100644
index 0000000..4dd8e0b
--- /dev/null
+++ b/webui/app/admin/layout.tsx
@@ -0,0 +1,13 @@
+"use client"
+import { PropsWithChildren } from "react";
+import { AppProvider } from "@/app/components/AppProvider";
+
+export default function PageLayout({
+ children, // will be a page or nested layout
+ }: PropsWithChildren ) {
+    return (
+        <AppProvider>
+            {children}
+        </AppProvider>
+    )
+}
\ No newline at end of file
diff --git a/webui/app/admin/page.tsx b/webui/app/admin/page.tsx
new file mode 100644
index 0000000..4b24add
--- /dev/null
+++ b/webui/app/admin/page.tsx
@@ -0,0 +1,126 @@
+"use client"
+import { Download, Upload } from "@mui/icons-material";
+import { Alert, AlertTitle, Box, Button, Container, Divider, Input, Typography } from "@mui/material";
+import TopBar from "../components/TopBar";
+import { jobApi } from "../redux/jobApi";
+import { ChangeEvent, MouseEventHandler, useCallback, useEffect, useMemo, useState } from "react";
+import { QueryStatus } from "@reduxjs/toolkit/query";
+
+export default function ReportPage() {
+
+ const [restoreDB, restore] = jobApi.endpoints.restoreDB.useLazyQuery();
+ const [backupDB, backup] = jobApi.endpoints.backupDB.useLazyQuery();
+ const [downloadLinkVisible, setDownloadLinkVisible] = useState(false);
+    const [file, setFile] = useState<File | null>(null);
+
+ useEffect(() => {
+ switch(backup.status) {
+ case QueryStatus.rejected:
+ alert("Unable to download backup")
+ break
+ case QueryStatus.fulfilled:
+ setDownloadLinkVisible(true)
+ break
+ }
+ }, [backup.status])
+
+ const downloadLink = useMemo(() => {
+
+ const result: Partial<{
+ href: string,
+ filename: string
+ }> = {}
+
+ if (!backup.data) return result
+
+ const json = JSON.stringify(backup.data, null, 2);
+ const blob = new Blob([json], { type: "application/json" });
+ result.href = URL.createObjectURL(blob)
+ result.filename = `seeking_backup_${backup.data.date}.json`
+
+ return result;
+
+ }, [backup.data])
+
+ const handleInputFileChange = useCallback((_event: ChangeEvent): void => {
+ const newFile = _event.currentTarget.files?.item(0);
+ if ( !newFile ) {
+ console.error(`File is null or undefined`)
+ return
+ }
+ setFile(newFile)
+ }, [])
+
+ const handleRestore: MouseEventHandler = useCallback((_event): void => {
+ if ( !file ) {
+ console.error(`File is null or undefined`)
+ return
+ }
+ restoreDB({file: file})
+ }, [restoreDB, file])
+
+ const handleDownload: MouseEventHandler = useCallback((_event): void => {
+ backupDB({})
+ }, [backupDB])
+
+    return (
+        <Box>
+            <TopBar />
+            <Container>
+                <Typography variant="h4">
+                    Administration Panel
+                </Typography>
+
+                {/**---------------------------- Backup DB -------------------------------- */}
+                <Typography variant="h6">Backup the database</Typography>
+                <Button startIcon={<Download />} onClick={handleDownload}>Download</Button>
+                {downloadLinkVisible && (
+                    <Alert severity="success">
+                        Download backup file:{' '}
+                        <a href={downloadLink?.href} download={downloadLink?.filename}>
+                            {downloadLink?.filename}
+                        </a>
+                    </Alert>
+                )}
+
+                <Divider>or</Divider>
+
+                {/**---------------------------- Restore DB ------------------------------ */}
+                <Typography variant="h6">Restore the database</Typography>
+                {restore.isUninitialized && <Alert severity="warning">
+                    <AlertTitle>Caution</AlertTitle>
+                    To prevent losing your data, back up the database before restoring from a file.
+                </Alert>}
+                <Input type="file" onChange={handleInputFileChange} />
+                <Button startIcon={<Upload />} onClick={handleRestore}>Upload</Button>
+
+                {restore.isSuccess && <Alert severity="success">
+                    <AlertTitle>DB restoration successful</AlertTitle>
+                    {restore.data}
+                </Alert>}
+                {restore.isError && <Alert severity="error">
+                    <AlertTitle>DB restoration failed!</AlertTitle>
+                    {JSON.stringify(restore.error)}
+                </Alert>}
+            </Container>
+        </Box>
+    )
+}
\ No newline at end of file
diff --git a/webui/app/components/TopBar.tsx b/webui/app/components/TopBar.tsx
index 33c944f..206d36d 100644
--- a/webui/app/components/TopBar.tsx
+++ b/webui/app/components/TopBar.tsx
@@ -55,6 +55,10 @@ export default function TopBar({
key: 'report',
label: 'Report',
href: '/report'
+ }, {
+ key: 'admin',
+ label: 'Admin',
+ href: '/admin'
}] satisfies MenuItem[]
}, [])
diff --git a/webui/app/redux/jobApi.ts b/webui/app/redux/jobApi.ts
index 0ad7bc4..4a4be4e 100644
--- a/webui/app/redux/jobApi.ts
+++ b/webui/app/redux/jobApi.ts
@@ -1,6 +1,6 @@
import { BaseQueryFn, createApi } from '@reduxjs/toolkit/query/react'
-import { Event, EventUpdate, Job, JobPage, JobCreate, EventCreate, apiErrorSchema, ApiError, JobUpdate } from '@/app/typings/api'
-import axios, { AxiosError, AxiosRequestConfig } from 'axios';
+import { Event, EventUpdate, Job, JobPage, JobCreate, EventCreate, apiErrorSchema, ApiError, JobUpdate, Backup } from '@/app/typings/api'
+import axios, { AxiosError, AxiosRequestConfig, AxiosRequestHeaders } from 'axios';
import { paths } from '../typings/openapi-autogenerated';
import { getEnvVarOrThrow } from '../utilities/getEnvVarOrThrow';
@@ -15,12 +15,13 @@ const axiosBaseQuery = ({ baseUrl }: { baseUrl: string }): BaseQueryFn<
method: AxiosRequestConfig['method'];
data?: AxiosRequestConfig['data'];
params?: AxiosRequestConfig['params'];
+ headers?: AxiosRequestConfig['headers'];
},
unknown,
unknown
-> => async ({ url, method, data, params }) => {
+> => async ({ url, method, data, params, headers }) => {
try {
- const result = await axios({ url: baseUrl + url, method, data, params });
+ const result = await axios({ url: baseUrl + url, method, data, params, headers });
return { data: result.data };
} catch (axiosError) {
let err = axiosError as AxiosError;
@@ -110,24 +111,40 @@ export const jobApi = createApi({
getPage: builder.query<
JobPage,
paths['/job/']['get']['parameters']['query']>({
- query: (params) => ({
- method: 'GET',
- url: '/job/',
- params
- }),
- providesTags: (result) => (
- result ? [
- ...result.item.flatMap(({ id, events }) => {
- return [
- { type: 'Job' as const, id }, // Job id
- ...( events ? events.map( evt => ({ type: 'Event' as const, id: evt.id })) : []) // Event ids
- ];
- }),
- { type: 'Job', id: 'LIST' },
- ]
- : [{ type: 'Job', id: 'LIST' }]
+ query: (params) => ({
+ method: 'GET',
+ url: '/job/',
+ params
+ }),
+ providesTags: (result) => (
+ result ? [
+ ...result.item.flatMap(({ id, events }) => {
+ return [
+ { type: 'Job' as const, id }, // Job id
+ ...( events ? events.map( evt => ({ type: 'Event' as const, id: evt.id })) : []) // Event ids
+ ];
+ }),
+ { type: 'Job', id: 'LIST' },
+ ]
+ : [{ type: 'Job', id: 'LIST' }]
)
}),
+        backupDB: builder.query<Backup, {}>({
+ query: () => ({
+ method: 'GET',
+ url: '/db/backup'
+ })
+ }),
+        restoreDB: builder.query<string, { file: File }>({
+ query: ({ file }) => ({
+ method: 'POST',
+ url: '/db/restore/v1',
+ data: { file },
+ headers: {
+ 'Content-Type': 'multipart/form-data'
+ }
+ })
+ }),
}),
})
@@ -143,4 +160,6 @@ export const {
useUpdateEventMutation,
useDeleteEventMutation,
useGetPageQuery,
+ useBackupDBQuery,
+ useRestoreDBQuery,
} = jobApi
diff --git a/webui/app/typings/api.ts b/webui/app/typings/api.ts
index fd6ed43..48cbd4e 100644
--- a/webui/app/typings/api.ts
+++ b/webui/app/typings/api.ts
@@ -1,8 +1,7 @@
// export schemas shorthand from openapi-autogenerated schemas
-import { components, operations } from './openapi-autogenerated'
+import { components, operations, paths } from './openapi-autogenerated'
import { z } from 'zod'
-
type schemas = components['schemas']
export type Job = schemas['Job']
@@ -17,6 +16,8 @@ export type EventType = schemas['EventType']
export type PageParams = NonNullable<operations['get_page_job__get']['parameters']['query']>
+export type Backup = operations['backup_db_backup_get']['responses']['200']['content']['application/json']
+
type EventTypeMeta = {
index: number;
label: string;
diff --git a/webui/app/typings/openapi-autogenerated.d.ts b/webui/app/typings/openapi-autogenerated.d.ts
index 6d9630c..91d37cb 100644
--- a/webui/app/typings/openapi-autogenerated.d.ts
+++ b/webui/app/typings/openapi-autogenerated.d.ts
@@ -31,23 +31,20 @@ export interface paths {
/** Update Event */
patch: operations["update_event_event__event_id___patch"];
};
- "/database/backup/": {
- /**
- * Backup Database
- * @description Return the whole Job table, can be restored using /database/restore/ endpoint
- */
- get: operations["backup_database_database_backup__get"];
+ "/db/backup": {
+ /** Backup */
+ get: operations["backup_db_backup_get"];
};
- "/database/restore/": {
+ "/db/restore/v1": {
/**
- * Restore Database
+ * Restore
* @description Restore the entire database from a large Job list. Existing data will be erased.
*/
- post: operations["restore_database_database_restore__post"];
+ post: operations["restore_db_restore_v1_post"];
};
"/": {
- /** Redirect Home To Docs */
- get: operations["redirect_home_to_docs__get"];
+ /** Hello World */
+ get: operations["hello_world__get"];
};
}
@@ -55,6 +52,21 @@ export type webhooks = Record<string, never>;
export interface components {
schemas: {
+ /** Backup_v1 */
+ Backup_v1: {
+ /**
+ * Version
+ * @constant
+ */
+ version: "1";
+ /**
+ * Date
+ * Format: date-time
+ */
+ date: string;
+ /** Jobs */
+ jobs: components["schemas"]["Job"][];
+ };
/**
* ContactType
* @description Distinguish the different contact type an Event can have
@@ -424,28 +436,25 @@ export interface operations {
};
};
};
- /**
- * Backup Database
- * @description Return the whole Job table, can be restored using /database/restore/ endpoint
- */
- backup_database_database_backup__get: {
+ /** Backup */
+ backup_db_backup_get: {
responses: {
/** @description Successful Response */
200: {
content: {
- "application/json": components["schemas"]["Page_Job_"];
+ "application/json": components["schemas"]["Backup_v1"];
};
};
};
};
/**
- * Restore Database
+ * Restore
* @description Restore the entire database from a large Job list. Existing data will be erased.
*/
- restore_database_database_restore__post: {
+ restore_db_restore_v1_post: {
requestBody: {
content: {
- "application/json": components["schemas"]["Page_Job_"];
+ "application/json": components["schemas"]["Backup_v1"];
};
};
responses: {
@@ -463,8 +472,8 @@ export interface operations {
};
};
};
- /** Redirect Home To Docs */
- redirect_home_to_docs__get: {
+ /** Hello World */
+ hello_world__get: {
responses: {
/** @description Successful Response */
200: {