Skip to content

Commit

Permalink
feat(*): backup/restore DB
Browse files Browse the repository at this point in the history
  • Loading branch information
berdal84 committed May 16, 2024
1 parent a4900af commit ab2f63e
Show file tree
Hide file tree
Showing 20 changed files with 428 additions and 93 deletions.
2 changes: 2 additions & 0 deletions api/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -158,3 +158,5 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

**/test_*.json
28 changes: 15 additions & 13 deletions api/main.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,22 @@
from fastapi import FastAPI, Request
from fastapi.responses import RedirectResponse
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
import uvicorn
from src import settings
from src import database_utils, api

database_utils.init()
database_utils.create_all()
settings = settings.get_settings()

# FastAPI application instance; routers and middleware are attached below.
# (Diff residue removed: the stale version='0.1.0' line and a duplicated '},'.)
app = FastAPI(
    title=settings.APP_NAME,
    version='12.0',
    description=f"{settings.APP_NAME} is a web API to manage job applications, it is written in Python using FastAPI.",
    contact={
        "Name": "Bérenger Dalle-Cort",
        "Email": "berenger@42borgata.com"
    },
    # Use one shared schema for input and output models in the OpenAPI doc.
    separate_input_output_schemas=False)

app.include_router(api.jobApi.router)
app.include_router(api.eventApi.router)
app.include_router(api.databaseApi.router)

@app.get("/")
async def redirect_home_to_docs():
return RedirectResponse(url='/docs')

app.add_middleware(
CORSMiddleware,
allow_origins=settings.ALLOW_ORIGINS,
Expand All @@ -34,6 +25,17 @@ async def redirect_home_to_docs():
allow_headers=settings.ALLOW_HEADERS,
)

app.router.redirect_slashes = False # Optional

app.include_router(api.jobApi.router)
app.include_router(api.eventApi.router)
app.include_router(api.databaseApi.router)

@app.get("/")
async def hello_world():
    # Root endpoint: greets with the running app's title and version.
    # Doubles as a cheap liveness check for the API.
    return f"Hello World, my name is {app.title} {app.version}"


""" Provide a way to run the app using `python -m main` command"""
if __name__ == "__main__":
    # reload=True watches source files for changes — development convenience;
    # NOTE(review): presumably not intended for production — confirm.
    uvicorn.run("main:app", host=settings.APP_HOST, port=settings.APP_PORT, log_level="info", reload=True)
3 changes: 2 additions & 1 deletion api/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ SQLAlchemy~=2.0.23
starlette~=0.27.0
pydantic~=2.5.1
postgres~=4.0
pydantic-settings~=2.2.1
pydantic-settings~=2.2.1
python-multipart~=0.0.9
51 changes: 34 additions & 17 deletions api/src/api/databaseApi.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,52 @@
from fastapi import HTTPException, Depends
from itertools import count
from typing import Annotated
from fastapi import File, Form, HTTPException, Depends, UploadFile
from pydantic import ValidationError
from sqlalchemy.orm import Session
from starlette import status
from src.schemas.backup import Backup_v1, make_backup
from src import schemas, crud, database_utils
from src.database_utils import get_db
from fastapi import APIRouter

router = APIRouter(
prefix="/database",
prefix="/db",
tags=['database']
)


@router.get("/backup")
async def backup(session: Session = Depends(get_db)) -> schemas.Backup_v1:
    """Return a backup of the whole Job table.

    The payload is a versioned Backup_v1 document that can be restored
    via the POST /db/restore/v1 endpoint.
    """
    # Extract the list of ALL jobs (get_all returns a single Page).
    page = crud.jobCrud.get_all(session)
    jobs = [schemas.Job.model_validate(job) for job in page.item]

    # Wrap the list in a dated, versioned backup document.
    return make_backup(jobs)


@router.post("/restore/")
async def restore_database(jobs: schemas.Page[schemas.Job], session: Session = Depends(get_db)) -> str:
@router.post("/restore/v1")
async def restore(
        file: Annotated[UploadFile, File()],
        session: Session = Depends(get_db)
) -> str:
    """Restore the entire database from an uploaded Backup_v1 JSON file.

    Existing data will be erased. Returns a confirmation message on success.

    Raises:
        HTTPException 422: the uploaded file is not a valid Backup_v1 document.
        HTTPException 500: the database could not be restored.
    """
    backup: schemas.Backup_v1

    try:
        file_str = file.file.read()
        backup = schemas.Backup_v1.model_validate_json(file_str)
    except ValidationError as err:
        # Invalid payload is a client error: forward pydantic's report as 422.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=err.json())

    if database_utils.erase_and_restore(backup.jobs, session) is False:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail='Unable to restore the database')
    return f"Database restored from {file.filename}"
2 changes: 1 addition & 1 deletion api/src/api/jobApi.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
from src.database_utils import get_db
from src.exceptions import JobNotFoundException
from src.schemas import Page

from fastapi import APIRouter

router = APIRouter(
tags=['job'],
prefix="/job"
Expand Down
4 changes: 2 additions & 2 deletions api/src/crud/jobCrud.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,6 @@ def get_page(session: Session,
# 3) Handle pagination
return paginate(query, skip, limit)

def get_all(session: Session):
    """Return every Job row wrapped in a single Page (no offset/limit applied)."""
    # Pass the un-executed Query object: paginate() needs it to count and slice.
    # (Removed a stale duplicate definition that called .all() here, which
    # would have handed paginate() a plain list instead of a Query.)
    query = session.query(models.Job)
    return paginate(query)
61 changes: 52 additions & 9 deletions api/src/database_utils.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,68 @@
from fastapi import HTTPException, status
from sqlalchemy.orm import Session
from src import schemas, models
from src.database import Base, engine, SessionLocal


def create_all():
    """Create every table declared on Base.metadata (existing tables are left untouched)."""
    Base.metadata.create_all(bind=engine)


def drop_all():
    """Drop every table declared on Base.metadata. Destructive — all data is lost."""
    Base.metadata.drop_all(bind=engine)


def restore(page: schemas.Page[schemas.Job], session: Session) -> bool:
def erase_and_restore(jobs: list[schemas.Job], session: Session) -> bool:
    """Erase the database and repopulate it from *jobs* (and their events).

    Drops all tables, recreates them, then inserts the given jobs and their
    nested events in a single transaction.

    Returns:
        True on success.

    Raises:
        HTTPException 422: not every schema could be converted to a model.
        HTTPException 500: dropping/creating tables or inserting rows failed;
            the session is rolled back first.
    """
    # Convert Job and Event schemas to ORM models.
    jobs_to_insert: list[models.Job] = []
    events_to_insert: list[models.Event] = []
    for job_schema in jobs:
        job = models.Job(**job_schema.model_dump(exclude={'events'}))  # Nested dump is failing TODO: investigate
        jobs_to_insert.append(job)
        for event_schema in job_schema.events:
            event = models.Event(**event_schema.model_dump())
            event.job_id = job.id
            events_to_insert.append(event)

    if len(jobs_to_insert) != len(jobs):
        # FIX: the original constructed HTTPException without raising it,
        # so every error branch silently fell through.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Unable to convert all schemas to models")

    # Try to drop existing tables and recreate clean ones.
    try:
        drop_all()
        create_all()
    except Exception:  # narrow from a bare `except:` (which also caught SystemExit)
        session.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Unable to recreate tables, transaction rolled back")

    # Insert the restored rows; commit inside the try so a failed flush
    # is caught and rolled back too.
    try:
        session.add_all(jobs_to_insert)
        session.add_all(events_to_insert)
        session.commit()
    except Exception:
        session.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Unable to restore backup, transaction rolled back")

    return True
Expand Down
1 change: 1 addition & 0 deletions api/src/schemas/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from .event import Event, EventCreate, EventUpdate, EventType
from .job import Job, JobCreate, JobUpdate
from .page import Page
from .backup import Backup_v1, make_backup

JobPage = Page[Job]
13 changes: 13 additions & 0 deletions api/src/schemas/backup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from .job import Job


class Backup_v1(BaseModel):
    """Versioned backup document: a dated snapshot of every Job (with events)."""
    # Schema version tag; only "1" is valid, so default it (bare Field() calls
    # added nothing and are dropped). Explicit version="1" still validates.
    version: Literal["1"] = "1"
    # Timestamp of when the backup was taken.
    date: datetime
    # Full contents of the Job table, events nested.
    jobs: list[Job]

def make_backup(jobs: list[Job]) -> Backup_v1:
    """Wrap *jobs* in a Backup_v1 document stamped with the current time."""
    return Backup_v1(version="1", date=datetime.now(), jobs=jobs)
5 changes: 2 additions & 3 deletions api/src/settings.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
from functools import lru_cache
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
APP_NAME: str = "Seeking API"
APP_HOST: str = "0.0.0.0"
APP_HOST: str = "127.0.0.1"
APP_PORT: int = 8000
DATABASE_NAME: str = "seeking"
DATABASE_USER: str = "postgres"
DATABASE_PASSWORD: str = "postgres"
DATABASE_HOST: str = "0.0.0.0"
DATABASE_HOST: str = "127.0.0.1"
ALLOW_ORIGINS: list[str] = ["*"]
ALLOW_CREDENTIALS: bool = True
ALLOW_METHODS: list[str] = ["*"]
Expand Down
17 changes: 16 additions & 1 deletion api/test_main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from fastapi.testclient import TestClient
from urllib.parse import urlencode
from src.schemas.backup import Backup_v1
from main import app
from src.schemas import JobPage
import logging
Expand Down Expand Up @@ -27,4 +28,18 @@ def test_get_page_with_date_range():
assert response.status_code == 200
page = JobPage.model_validate(response.json())
LOGGER.debug("\n{}".format(page.model_dump_json(indent=1)))
assert page is not None
assert page is not None

def test_backup():
    """GET /db/backup returns a valid Backup_v1; persist it for test_restore."""
    response = client.get('/db/backup')
    # Assert the HTTP status BEFORE parsing, so a failing endpoint reports
    # the real cause instead of a pydantic ValidationError.
    assert response.status_code == 200
    backup = Backup_v1.model_validate(response.json())
    LOGGER.debug("\n{}".format(backup.model_dump_json(indent=1)))
    # FIX: the file was opened but never closed — 'with' guarantees it is
    # flushed and closed before test_restore reads it back.
    with open("./test_main_backup_tmp.json", "w") as backup_tmp:
        backup_tmp.write(backup.model_dump_json(indent=1))

def test_restore():
    """POST the backup written by test_backup and expect a successful restore."""
    # FIX: the upload handle was leaked — close it deterministically.
    with open("./test_main_backup_tmp.json", "rb") as backup_file:
        response = client.post('/db/restore/v1', files={'file': backup_file})
    LOGGER.debug(response.text)
    assert response.status_code == 200
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -88,10 +88,10 @@ services:
POSTGRES_PASSWORD: postgres
POSTGRES_DB: seeking
ports:
- 5432
- 5432:5432
networks: ['backend']
healthcheck: # See https://docs.docker.com/reference/dockerfile/#healthcheck
test: ["CMD-SHELL", "pg_isready"]
test: [ "CMD", "pg_isready", "-q", "-d", "seeking", "-U", "postgres" ]
start_interval: 1s
start_period: 5s
interval: 5s
Expand Down
6 changes: 6 additions & 0 deletions webui/.env
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@

# !!! DO NOT COMMIT ANY PERSONAL DATA HERE !!!
#---------- use .env.local instead ------------
NEXT_PUBLIC_API_BASE_URL=http://127.0.0.1:8000
#---------- use .env.local instead ------------
# !!! DO NOT COMMIT ANY PERSONAL DATA HERE !!!
64 changes: 64 additions & 0 deletions webui/app/admin/components/Report.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
"use client"
import { Job } from "@/app/typings/api"
import { Alert, Table } from "@mui/material"
import TableBody from "@mui/material/TableBody"
import TableCell from "@mui/material/TableCell"
import TableContainer from "@mui/material/TableContainer"
import TableHead from "@mui/material/TableHead"
import TableRow from "@mui/material/TableRow"
import moment from "moment"

// Props for <Report/>: the jobs whose events are flattened into report rows.
export type Props = {
    jobs: Job[]
}
// Renders one table row per (job, event) pair: jobs are flattened so each
// event becomes a dated line with its job's company and role.
export default function Report(props: Props) {
    const { jobs } = props;

    // Empty state: a single informational row.
    // NOTE(review): this returns a bare <TableRow> with no enclosing <Table>,
    // unlike the populated branch below — assumes the caller embeds <Report/>
    // inside a table in this case; verify, otherwise the DOM nesting is invalid.
    if ( jobs.length === 0)
        return (
            <TableRow>
                <TableCell rowSpan={8}>
                    <Alert severity="info">
                        No data found for the selected period
                    </Alert>
                </TableCell>
            </TableRow>
        )

    return (
        <TableContainer>
            <Table size="small">
                <TableHead>
                    <TableRow>
                        <TableCell>Date</TableCell>
                        <TableCell>Type</TableCell>
                        <TableCell>Name</TableCell>
                        <TableCell>Person Contacted</TableCell>

                        <TableCell>Contact Type</TableCell>
                        <TableCell>Contact Info</TableCell>
                        <TableCell>Type of Work</TableCell>
                        <TableCell>Results</TableCell>
                    </TableRow>
                </TableHead>

                <TableBody>
                    {/* flatMap: one row per event, keyed on job id + event id */}
                    {jobs.flatMap((job) => (
                        job.events.map((event) => (
                            <TableRow key={`${job.id}-${event.id}`}>
                                <TableCell>{moment(event.date).format("MM/DD/YYYY")}</TableCell>
                                <TableCell>Employer</TableCell>
                                <TableCell>{job.company}</TableCell>
                                <TableCell>{event.contact_person}</TableCell>
                                <TableCell>{event.contact_type}</TableCell>
                                <TableCell>{event.contact_info}</TableCell>
                                <TableCell>{job.role}</TableCell>
                                <TableCell>{event.result}</TableCell>
                            </TableRow>
                        ))))
                    }
                </TableBody>
            </Table>
        </TableContainer>
    )
}
Loading

0 comments on commit ab2f63e

Please sign in to comment.