Skip to content

Commit

Permalink
Merge branch 'cleanup' of https://github.com/socfortress/CoPilot into…
Browse files Browse the repository at this point in the history
… cleanup
  • Loading branch information
Linko91 committed Aug 31, 2024
2 parents 6436a40 + 989117b commit 1cc43dc
Show file tree
Hide file tree
Showing 6 changed files with 172 additions and 3 deletions.
74 changes: 74 additions & 0 deletions backend/app/agents/routes/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
import csv
import io
from fastapi import Path
from fastapi import Security
from loguru import logger
Expand Down Expand Up @@ -460,6 +463,77 @@ async def get_agent_vulnerabilities(
return await collect_agent_vulnerabilities_new(agent_id, vulnerability_severity.value)
return await collect_agent_vulnerabilities(agent_id, vulnerability_severity.value)

@agents_router.get(
    "/{agent_id}/csv/vulnerabilities",
    description="Get agent vulnerabilities as CSV",
    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
)
async def get_agent_vulnerabilities_csv(agent_id: str, session: AsyncSession = Depends(get_db)):
    """
    Fetch the vulnerabilities of a specific agent and return them as a CSV download.

    Args:
        agent_id (str): The ID of the agent.
        session (AsyncSession): DB session from dependency injection (kept for
            wiring consistency; not read directly in this handler).

    Returns:
        StreamingResponse: text/csv attachment named "<agent_id>_vulnerabilities.csv".
    """
    logger.info(f"Fetching agent {agent_id} vulnerabilities as CSV")
    wazuh_new = await check_wazuh_manager_version()
    if wazuh_new is True:
        logger.info("Wazuh Manager version is 4.8.0 or higher. Fetching vulnerabilities using new API")
        # NOTE(review): the new API is queried with severity "High" while the legacy
        # path below uses "Critical" — confirm this asymmetry is intentional.
        vulnerabilities = (await collect_agent_vulnerabilities_new(agent_id, vulnerability_severity="High")).vulnerabilities
    else:
        # TODO(review): unlike the branch above, this does not access `.vulnerabilities`
        # on the result — verify collect_agent_vulnerabilities returns the bare list.
        vulnerabilities = await collect_agent_vulnerabilities(agent_id, vulnerability_severity="Critical")
    # Build the CSV in memory.
    logger.info(f"Creating CSV file for agent {agent_id} with {len(vulnerabilities)} vulnerabilities")
    output = io.StringIO()
    writer = csv.writer(output)
    # Header row.
    writer.writerow(
        [
            "Severity",
            "Version",
            "Type",
            "Name",
            "External References",
            "Detection Time",
            "CVSS3 Score",
            "Published",
            "Architecture",
            "CVE",
            "Status",
            "Title",
        ],
    )
    # One data row per vulnerability, columns matching the header order.
    for vulnerability in vulnerabilities:
        writer.writerow(
            [
                vulnerability.severity,
                vulnerability.version,
                vulnerability.type,
                vulnerability.name,
                ', '.join(vulnerability.external_references) if vulnerability.external_references else "",
                vulnerability.detection_time,
                vulnerability.cvss3_score,
                vulnerability.published,
                vulnerability.architecture,
                vulnerability.cve,
                vulnerability.status,
                vulnerability.title,
            ],
        )
    # StreamingResponse iterates its content; a bare str would be streamed one
    # character per chunk, so hand it a single-element iterator instead.
    return StreamingResponse(
        content=iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename={agent_id}_vulnerabilities.csv"},
    )



@agents_router.get(
"/{agent_id}/sca",
Expand Down
1 change: 0 additions & 1 deletion backend/app/connectors/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,6 @@ def get_connector_service(connector_name: str) -> Type[ConnectorServiceInterface
"Wazuh-Indexer": WazuhIndexerService,
"Velociraptor": VelociraptorService,
"Graylog": GraylogService,
"DFIR-IRIS": DfirIrisService,
"Cortex": CortexService,
"Shuffle": ShuffleService,
"Sublime": SublimeService,
Expand Down
69 changes: 68 additions & 1 deletion backend/app/customer_provisioning/services/decommission.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ async def decomission_wazuh_customer(
session=session,
)

# Decommission HAProxy
await decommission_haproxy(
request=DecommissionWorkerRequest(customer_name=customer_meta.customer_name),
session=session,
)

# Delete Customer Meta
await session.delete(customer_meta)
await session.commit()
Expand Down Expand Up @@ -98,8 +104,19 @@ async def decommission_wazuh_worker(
ProvisionWorkerResponse: The response object indicating the success or failure of the provisioning operation.
"""
logger.info(f"Decommissioning Wazuh worker {request}")
# Check if the connector is verified
if await get_connector_attribute(
connector_name="Wazuh Worker Provisioning",
column_name="connector_verified",
session=session,
) is False:
logger.info("Wazuh Worker Provisioning connector is not verified, skipping ...")
return DecommissionWorkerResponse(
success=False,
message="Wazuh Worker Provisioning connector is not verified",
)
api_endpoint = await get_connector_attribute(
connector_id=13,
connector_name="Wazuh Worker Provisioning",
column_name="connector_url",
session=session,
)
Expand All @@ -119,3 +136,53 @@ async def decommission_wazuh_worker(
success=True,
message="Wazuh worker provisioned successfully",
)


######### ! Decommission HAProxy ! ############
async def decommission_haproxy(
    request: DecommissionWorkerRequest,
    session: AsyncSession,
) -> DecommissionWorkerResponse:
    """
    Decommission a HAProxy worker for a customer.

    Args:
        request (DecommissionWorkerRequest): The request object identifying the customer
            whose HAProxy worker should be decommissioned.
        session (AsyncSession): The async database session used to look up connector attributes.

    Returns:
        DecommissionWorkerResponse: Success flag and human-readable message describing the outcome.
    """
    logger.info(f"Decommissioning HAProxy worker {request}")
    # Skip entirely when the HAProxy provisioning connector has not been verified.
    if await get_connector_attribute(
        connector_name="HAProxy Provisioning",
        column_name="connector_verified",
        session=session,
    ) is False:
        logger.info("HAProxy Provisioning connector is not verified, skipping ...")
        return DecommissionWorkerResponse(
            success=False,
            message="HAProxy Provisioning connector is not verified",
        )
    api_endpoint = await get_connector_attribute(
        connector_name="HAProxy Provisioning",
        column_name="connector_url",
        session=session,
    )
    # NOTE(review): requests.post is a blocking call inside an async handler —
    # consider httpx.AsyncClient to avoid stalling the event loop.
    response = requests.post(
        url=f"{api_endpoint}/provision_worker/haproxy/decommission",
        json=request.dict(),
    )
    # Any non-200 from the worker service is reported back as a failure.
    if response.status_code != 200:
        return DecommissionWorkerResponse(
            success=False,
            message=f"Failed to decommission HAProxy worker: {response.text}",
        )
    return DecommissionWorkerResponse(
        success=True,
        message="HAProxy worker decommissioned successfully",
    )
1 change: 1 addition & 0 deletions backend/app/customer_provisioning/services/provision.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,7 @@ async def provision_wazuh_worker(
url=f"{api_endpoint}/provision_worker",
json=request.dict(),
)
logger.info(f"Status code from Wazuh Worker: {response.status_code}")
# Check the response status code
if response.status_code != 200:
return ProvisionWorkerResponse(
Expand Down
18 changes: 18 additions & 0 deletions backend/app/db/db_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from alembic import command
from alembic.config import Config
from app.auth.services.universal import create_admin_user
from app.schedulers.routes.scheduler import delete_job
from app.auth.services.universal import create_scheduler_user
from app.auth.services.universal import remove_scheduler_user
from app.db.db_populate import add_available_integrations_auth_keys_if_not_exist
Expand Down Expand Up @@ -298,6 +299,23 @@ async def ensure_scheduler_user(async_engine):
# Pass the session to the inner function
await create_scheduler_user(session)

async def delete_job_if_exists(async_engine):
    """
    Delete the legacy "wazuh_index_fields_resize" scheduler job from the database if it exists.

    Args:
        async_engine: SQLAlchemy async engine used to open the session.

    Returns:
        None
    """
    # Hard-coded on purpose: this startup cleanup targets exactly one legacy job.
    job_id = "wazuh_index_fields_resize"
    logger.info(f"Deleting job with ID {job_id}")
    async with AsyncSession(async_engine) as session:
        async with session.begin():
            # Pass the session to the inner function
            # NOTE(review): delete_job is imported from the scheduler routes module —
            # confirm its positional order (session, job_id) matches its definition.
            await delete_job(session, job_id)


async def ensure_scheduler_user_removed(async_engine):
"""
Expand Down
12 changes: 11 additions & 1 deletion backend/app/schedulers/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@
)
from app.schedulers.services.wazuh_index_resize import resize_wazuh_index_fields


def scheduler_listener(event):
if event.exception:
logger.error(f"Job {event.job_id} crashed: {event.exception}")
Expand Down Expand Up @@ -164,6 +163,16 @@ async def schedule_enabled_jobs(scheduler):
Schedules jobs that are enabled in the database.
"""
async with AsyncSession(async_engine) as session:
# ! First disable the job of `invoke_wazuh_monitoring_alert` if it is enabled
# TODO ! Inefficient as hell but I will come back to this later
stmt = select(JobMetadata).where(JobMetadata.job_id == "invoke_wazuh_monitoring_alert")
result = await session.execute(stmt)
job_metadata = result.scalars().one_or_none()
if job_metadata:
logger.info("Disabling job: invoke_wazuh_monitoring_alert")
job_metadata.enabled = False
await session.commit()

stmt = select(JobMetadata).where(JobMetadata.enabled == True)
result = await session.execute(stmt)
job_metadatas = result.scalars().all()
Expand Down Expand Up @@ -198,6 +207,7 @@ async def schedule_enabled_jobs(scheduler):
logger.error(f"Error scheduling job: {e}")



def get_function_by_name(function_name: str):
"""
Returns a function object based on its name.
Expand Down

0 comments on commit 1cc43dc

Please sign in to comment.