diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..5f3dd6df --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +# EditorConfig is awesome: http://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true +charset = utf-8 +end_of_line = lf + +# 2 spaces for YAML +[*.{yml,yaml}] +indent_size = 2 + +# 2 spaces for web things +[*.{js,vue,html,json}] +indent_size = 2 + +# Windows, windows, windows +[*.{bat,cmd,ps1}] +indent_style = tab +end_of_line = crlf + +# Makefile, tabs are a must +[Makefile] +indent_style = tab diff --git a/.eslintrc.cjs b/.eslintrc.cjs new file mode 100644 index 00000000..c1c98203 --- /dev/null +++ b/.eslintrc.cjs @@ -0,0 +1,22 @@ +/* eslint-env node */ +require("@rushstack/eslint-patch/modern-module-resolution") + +module.exports = { + root: true, + extends: [ + "plugin:vue/vue3-essential", + "eslint:recommended", + "@vue/eslint-config-typescript", + "@vue/eslint-config-prettier" + ], + overrides: [ + { + files: ["cypress/e2e/**.{cy,spec}.{js,ts,jsx,tsx}"], + extends: ["plugin:cypress/recommended"] + } + ], + parserOptions: { + ecmaVersion: "latest", + sourceType: "module" + } +} diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..dbbf6cc3 --- /dev/null +++ b/.flake8 @@ -0,0 +1,12 @@ +[flake8] +#max-complexity = 18 +max-line-length = 120 +#select = B,C,E,F,W,T4,B9 +#ignore = E203, E266, E501, W503, F403, F401 +ignore = W503, E231, W605 + # W503, # line break before binary operator + # E231, # missing whitespace after ',' (caused by black style) + # W605, # invalid escape sequence (caused by regex) +extend-ignore = E203 +exclude = + .venv diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..9d17c655 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,16 @@ +name: pre-commit + +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + - uses: pre-commit/action@v3.0.0 diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..7c9641bb --- /dev/null +++ b/.gitignore @@ -0,0 +1,41 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +.DS_Store +dist +dist-ssr +coverage +*.local + +/cypress/videos/ +/cypress/screenshots/ + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? 
+ +# Python Virtualenv +__pycache__/ +*.py[cod] +build/ +.venv/ +wheels/ +*.egg-info/ +*.egg + +# Local development artifacts +*.db +.env diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000..68422384 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,4 @@ +[settings] +profile=black +force_single_line=True +src_paths=backend diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..f659cdff --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,62 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-merge-conflict + - id: check-yaml + args: ["--unsafe"] + - id: end-of-file-fixer + - id: check-json + - id: trailing-whitespace + - id: check-added-large-files + - id: detect-private-key + - id: requirements-txt-fixer + args: ["backend/requirements.txt", "backend/requirements.in"] + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + name: Sort python imports (shows diff) + args: ["-c", "--diff"] + - id: isort + name: Sort python imports (fixes files) + + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + language_version: python3.9 + + - repo: https://github.com/asottile/setup-cfg-fmt + rev: v2.3.0 + hooks: + - id: setup-cfg-fmt + + - repo: https://github.com/asottile/add-trailing-comma + rev: v2.4.0 + hooks: + - id: add-trailing-comma + + - repo: https://github.com/PyCQA/flake8 + rev: 6.0.0 + hooks: + - id: flake8 + name: Check project styling + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v3.0.0-alpha.6" + hooks: + - id: prettier + + - repo: https://github.com/pre-commit/mirrors-eslint + rev: v8.41.0 + hooks: + - id: eslint + files: \.([cjt]sx?|[cm]ts|[cm]js|cvue)$ # *.js, *.jsx, *.ts, *.tsx and *.vue + additional_dependencies: + - "@rushstack/eslint-patch@1.2.0" + - eslint@8.39.0 + - "@vue/eslint-config-prettier@7.1.0" + - "@vue/eslint-config-typescript@11.0.2" + - eslint-plugin-cypress@2.13.3 + - eslint-plugin-vue@9.11.1 diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 00000000..f390a0bf --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,8 @@ +{ + "arrowParens": "avoid", + "printWidth": 120, + "semi": false, + "tabWidth": 4, + "trailingComma": "none", + "useTabs": false +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..ad0afdff --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,16 @@ +{ + "recommendations": [ + "ms-python.python", + "ms-python.black-formatter", + "editorconfig.editorconfig", + "github.vscode-github-actions", + "ecmel.vscode-html-css", + "george-alison.html-preview-vscodes", + "esbenp.prettier-vscode", + "octref.vetur", + "ms-vscode.remtoe-remote-wsl", + "dbaeumer.vscode-eslint", + "Vue.volar", + "Vue.vscode-typescript-vue-plugin" + ] +} diff --git a/Vagrantfile b/Vagrantfile new file mode 100644 index 00000000..e0dbb4d8 --- /dev/null +++ b/Vagrantfile @@ -0,0 +1,3 @@ +Vagrant.configure("2") do |config| + config.vm.box = "debian/bullseye64" +end diff --git a/backend/README.MD b/backend/README.MD new file mode 100644 index 00000000..1c98751d --- /dev/null +++ b/backend/README.MD @@ -0,0 +1,168 @@ +# DB Modals + +```mermaid +classDiagram + class ConnectorsAvailable { + +int id + +str connector_name + +str connector_description + +str connector_supports + +bool connector_configured + +bool connector_verified + } + + class Connectors { + +int id + +str connector_name + +str connector_type + +str connector_url + +DateTime 
connector_last_updated + +str connector_username + +str connector_password + +str connector_api_key + } + + class DisabledRules { + +int id + +str rule_id + +str previous_level + +str new_level + +str reason_for_disabling + +DateTime date_disabled + +int length_of_time + } + + class WazuhIndexerAllocation { + +int id + +str node + +float disk_used + +float disk_available + +float disk_total + +float disk_percent + +DateTime timestamp + } + + class GraylogMetricsAllocation { + +int id + +float input_usage + +float output_usage + +float processor_usage + +float input_1_sec_rate + +float output_1_sec_rate + +float total_input + +float total_output + +DateTime timestamp + } + + class AgentMetadata { + +int id + +str agent_id + +str ip_address + +str os + +str hostname + +bool critical_asset + +DateTime last_seen + } + + class Case { + +int id + +int case_id + +str case_name + +str agents + } + + class Artifact { + +int id + +str artifact_name + +JSONB artifact_results + +str hostname + } +``` + + +# Connector Classes + +```mermaid +classDiagram + class Connector { + +attributes: dict + +verify_connection() + +get_connector_info_from_db(connector_name: str) + } + + class WazuhIndexerConnector { + +verify_connection() + } + Connector <|-- WazuhIndexerConnector + + class GraylogConnector { + +verify_connection() + } + Connector <|-- GraylogConnector + + class WazuhManagerConnector { + +verify_connection() + } + Connector <|-- WazuhManagerConnector + + class ShuffleConnector { + +verify_connection() + } + Connector <|-- ShuffleConnector + + class DfirIrisConnector { + +verify_connection() + } + Connector <|-- DfirIrisConnector + + class VelociraptorConnector { + +verify_connection() + } + Connector <|-- VelociraptorConnector + + class RabbitMQConnector { + +verify_connection() + } + Connector <|-- RabbitMQConnector + + class ConnectorFactory { + -_creators: dict + +register_creator(key: str, creator: str) + +create(key: str, connector_name: str) + } +``` + +# Routes + +```mermaid +graph TD; + A["/connectors (GET)"] --> B["list_connectors_available()"] + C["/connectors/wazuh-manager (GET)"] --> D["get_wazuh_manager_connector()"] + E["/connectors/ (PUT)"] --> F["update_connector_route(id)"] +``` + +# Responses + +```mermaid +graph TD; + A[update_connector_in_db] --> B[Return Data] + C[update_connector] --> D[Return Data] + E[process_connector] --> F[Return Data] + G[ConnectorFactory.create] --> H[Connector Instance] + H --> I[WazuhIndexerConnector.verify_connection] + H --> J[GraylogConnector.verify_connection] + H --> K[WazuhManagerConnector.verify_connection] + H --> L[DfirIrisConnector.verify_connection] + H --> M[VelociraptorConnector.verify_connection] + H --> N[RabbitMQConnector.verify_connection] + H --> O[ShuffleConnector.verify_connection] + I --> P[Return Data] + J --> Q[Return Data] + K --> R[Return Data] + L --> S[Return Data] + M --> T[Return Data] + N --> U[Return Data] + O --> V[Return Data] + +``` + diff --git a/backend/app/__init__.py b/backend/app/__init__.py new file mode 100644 index 00000000..57adf795 --- /dev/null +++ b/backend/app/__init__.py @@ -0,0 +1,58 @@ +from flask import Flask +from flask_cors import CORS +from flask_marshmallow import Marshmallow +from flask_migrate import Migrate +from flask_sqlalchemy import SQLAlchemy +from flask_swagger_ui import get_swaggerui_blueprint + +# from app.routes import bp # Import the blueprint + +app = Flask(__name__) + +SWAGGER_URL = "/api/docs" # URL for exposing Swagger UI (without trailing '/') +API_URL = "/static/swagger.json" # 
Our API url (can of course be a local resource) + +swaggerui_blueprint = get_swaggerui_blueprint( + SWAGGER_URL, # Swagger UI static files will be mapped to '{SWAGGER_URL}/dist/' + API_URL, + config={"app_name": "Test application"}, # Swagger UI config overrides + # oauth_config={ # OAuth config. See https://github.com/swagger-api/swagger-ui#oauth2-configuration . + # 'clientId': "your-client-id", + # 'clientSecret': "your-client-secret-if-required", + # 'realm': "your-realms", + # 'appName': "your-app-name", + # 'scopeSeparator': " ", + # 'additionalQueryStringParams': {'test': "hello"} + # } +) + +app.register_blueprint(swaggerui_blueprint) + +CORS(app) + + +app.config.from_object("settings") + +db = SQLAlchemy(app) +migrate = Migrate(app, db) +ma = Marshmallow(app) + +from app.routes.connectors import bp as connectors_bp # Import the blueprint +from app.routes.agents import bp as agents_bp # Import the blueprint +from app.routes.rules import bp as rules_bp # Import the blueprint +from app.routes.graylog import bp as graylog_bp # Import the blueprint +from app.routes.alerts import bp as alerts_bp # Import the blueprint +from app.routes.wazuhindexer import bp as wazuhindexer_bp # Import the blueprint +from app.routes.shuffle import bp as shuffle_bp # Import the blueprint +from app.routes.velociraptor import bp as velociraptor_bp # Import the blueprint +from app.routes.dfir_iris import bp as dfir_iris_bp # Import the blueprint + +app.register_blueprint(connectors_bp) # Register the connectors blueprint +app.register_blueprint(agents_bp) # Register the agents blueprint +app.register_blueprint(rules_bp) # Register the rules blueprint +app.register_blueprint(graylog_bp) # Register the graylog blueprint +app.register_blueprint(alerts_bp) # Register the alerts blueprint +app.register_blueprint(wazuhindexer_bp) # Register the wazuhindexer blueprint +app.register_blueprint(shuffle_bp) # Register the shuffle blueprint +app.register_blueprint(velociraptor_bp) # Register the velociraptor blueprint +app.register_blueprint(dfir_iris_bp) # Register the dfir_iris blueprint diff --git a/backend/app/models/agents.py b/backend/app/models/agents.py new file mode 100644 index 00000000..1c0e3b5f --- /dev/null +++ b/backend/app/models/agents.py @@ -0,0 +1,69 @@ +from datetime import datetime + +from loguru import logger +from sqlalchemy.dialects.postgresql import JSONB # Add this line + +from app import db +from app import ma + + +# Class for agent metadata which stores the agent ID, IP address, hostname, OS, last seen timestamp, +# and boolean for critical assest. +# Path: backend\app\models.py +class AgentMetadata(db.Model): + id = db.Column(db.Integer, primary_key=True) + agent_id = db.Column(db.String(100)) + ip_address = db.Column(db.String(100)) + os = db.Column(db.String(100)) + hostname = db.Column(db.String(100)) + critical_asset = db.Column(db.Boolean, default=False) + last_seen = db.Column(db.DateTime) + + def __init__(self, agent_id, ip_address, os, hostname, critical_asset, last_seen): + self.agent_id = agent_id + self.ip_address = ip_address + self.os = os + self.hostname = hostname + self.critical_asset = critical_asset + self.last_seen = last_seen + + def __repr__(self): + return f"" + + def mark_as_critical(self): + """ + Marks the agent as a critical asset. + """ + self.critical_asset = True + db.session.commit() + + def mark_as_non_critical(self): + """ + Marks the agent as a non-critical asset. 
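+
+        Example (illustrative sketch; the agent_id value "001" is an assumption, not data from this project):
+            agent = AgentMetadata.query.filter_by(agent_id="001").first()
+            agent.mark_as_non_critical()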
+ """ + self.critical_asset = False + db.session.commit() + + def commit_wazuh_agent_to_db(self): + """ + Commits the agent to the database. + """ + db.session.add(self) + db.session.commit() + + +class AgentMetadataSchema(ma.Schema): + class Meta: + fields = ( + "id", + "agent_id", + "ip_address", + "os", + "hostname", + "critical_asset", + "last_seen", + ) + + +agent_metadata_schema = AgentMetadataSchema() +agent_metadatas_schema = AgentMetadataSchema(many=True) diff --git a/backend/app/models/connectors.py b/backend/app/models/connectors.py new file mode 100644 index 00000000..410c39a1 --- /dev/null +++ b/backend/app/models/connectors.py @@ -0,0 +1,483 @@ +import importlib +import json +import os +import pika +from dataclasses import dataclass +import requests +from abc import ABC, abstractmethod +from elasticsearch7 import Elasticsearch +from loguru import logger +from sqlalchemy.orm.exc import NoResultFound +import pyvelociraptor +from pyvelociraptor import api_pb2 +from pyvelociraptor import api_pb2_grpc +from werkzeug.utils import secure_filename +import grpc + +from sqlalchemy.exc import SQLAlchemyError +from flask import current_app + +from app.models.models import Connectors, connectors_schema, ConnectorsAvailable + + +def dynamic_import(module_name, class_name): + """ + This function dynamically imports a module and returns a specific class from it. + + :param module_name: A string that specifies the name of the module to import. + :param class_name: A string that specifies the name of the class to get from the module. + :return: The class specified by class_name from the module specified by module_name. + """ + module = importlib.import_module(module_name) + class_ = getattr(module, class_name) + return class_ + + +@dataclass +class Connector(ABC): + """ + This abstract base class defines the interface for a connector. A connector is an object that + connects to a specific service or system and performs actions on it. The specific service or + system a connector connects to is defined by the connector's attributes. + + :param attributes: A dictionary of attributes necessary for the connector to connect to the service or system. + """ + + attributes: dict + + @abstractmethod + def verify_connection(self): + """ + This abstract method should be implemented by all subclasses of Connector. It is meant to verify the + connection to the service or system the connector is designed to connect to. + + :return: Depends on the implementation in the subclass. + """ + pass + + @staticmethod + def get_connector_info_from_db(connector_name): + """ + This method retrieves connector information from the database. + + :param connector_name: A string that specifies the name of the connector whose information is to be retrieved. + :return: A dictionary of the connector's attributes if the connector exists. Otherwise, it raises a NoResultFound exception. + Raises: + NoResultFound: If the connector_name is not found in the database. + """ + connector = ( + current_app.extensions["sqlalchemy"] + .db.session.query(Connectors) + .filter_by(connector_name=connector_name) + .first() + ) + if connector: + attributes = { + col.name: getattr(connector, col.name) + for col in Connectors.__table__.columns + } + return attributes + else: + raise NoResultFound + + +class WazuhIndexerConnector(Connector): + """ + This class represents a connector for the Wazuh indexer service. It is a subclass of Connector. + + :param connector_name: A string that specifies the name of the connector. 
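+
+    Example (illustrative; "Wazuh-Indexer" matches the key registered with connector_factory at the bottom of this module, and assumes a connector row with that name exists in the database):
+        indexer = WazuhIndexerConnector("Wazuh-Indexer")
+        indexer.verify_connection()  # returns {"connectionSuccessful": True} on success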
+ """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + This method verifies the connection to the Wazuh indexer service. + + :return: A dictionary containing the status of the connection attempt and information about the cluster's health. + """ + logger.info( + f"Verifying the wazuh-indexer connection to {self.attributes['connector_url']}" + ) + try: + es = Elasticsearch( + [self.attributes["connector_url"]], + http_auth=( + self.attributes["connector_username"], + self.attributes["connector_password"], + ), + verify_certs=False, + timeout=15, + max_retries=10, + retry_on_timeout=False, + ) + cluster_health = es.cluster.health() + logger.info(f"Connection to {self.attributes['connector_url']} successful") + return {"connectionSuccessful": True} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False, "clusterHealth": None} + + +class GraylogConnector(Connector): + """ + This class represents a connector for the Graylog service. It is a subclass of Connector. + + :param connector_name: A string that specifies the name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to Graylog service. + + Returns: + dict: A dictionary containing 'connectionSuccessful' status and 'roles' if the connection is successful. + """ + logger.info( + f"Verifying the graylog connection to {self.attributes['connector_url']}" + ) + try: + graylog_roles = requests.get( + f"{self.attributes['connector_url']}/api/authz/roles/user/{self.attributes['connector_username']}", + auth=( + self.attributes["connector_username"], + self.attributes["connector_password"], + ), + verify=False, + ) + if graylog_roles.status_code == 200: + logger.info( + f"Connection to {self.attributes['connector_url']} successful" + ) + return {"connectionSuccessful": True} + else: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {graylog_roles.text}" + ) + return {"connectionSuccessful": False, "roles": None} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False, "roles": None} + + +class WazuhManagerConnector(Connector): + """ + This class represents a connector for the Wazuh manager service. It is a subclass of Connector. + + :param connector_name: A string that specifies the name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to Wazuh manager service. + + Returns: + dict: A dictionary containing 'connectionSuccessful' status and 'authToken' if the connection is successful. 
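+
+        Example (illustrative sketch; the token value is a placeholder, not real output):
+            >>> WazuhManagerConnector("Wazuh-Manager").verify_connection()
+            {'connectionSuccessful': True, 'authToken': '...'}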
+ """ + logger.info( + f"Verifying the wazuh-manager connection to {self.attributes['connector_url']}" + ) + try: + wazuh_auth_token = requests.get( + f"{self.attributes['connector_url']}/security/user/authenticate", + auth=( + self.attributes["connector_username"], + self.attributes["connector_password"], + ), + verify=False, + ) + if wazuh_auth_token.status_code == 200: + logger.debug("Wazuh Authentication Token successful") + wazuh_auth_token = wazuh_auth_token.json() + wazuh_auth_token = wazuh_auth_token["data"]["token"] + return {"connectionSuccessful": True, "authToken": wazuh_auth_token} + else: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {wazuh_auth_token.text}" + ) + return {"connectionSuccessful": False, "authToken": None} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False, "authToken": None} + + def get_auth_token(self): + """ + Returns the authentication token for the Wazuh manager service. + + Returns: + str: Authentication token for the Wazuh manager service. + """ + return self.verify_connection()["authToken"] + + +class ShuffleConnector(Connector): + """ + This class represents a connector for the Shuffle service. It is a subclass of Connector. + + :param connector_name: A string that specifies the name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to Shuffle service. + + Returns: + dict: A dictionary containing 'connectionSuccessful' status and 'apps' if the connection is successful. + """ + logger.info( + f"Verifying the shuffle connection to {self.attributes['connector_url']}" + ) + try: + headers = { + "Authorization": f"Bearer {self.attributes['connector_api_key']}" + } + shuffle_apps = requests.get( + f"{self.attributes['connector_url']}/api/v1/apps", + headers=headers, + verify=False, + ) + if shuffle_apps.status_code == 200: + logger.info( + f"Connection to {self.attributes['connector_url']} successful" + ) + return {"connectionSuccessful": True} + else: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {shuffle_apps.text}" + ) + return {"connectionSuccessful": False} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False} + + +class DfirIrisConnector(Connector): + """ + This class represents a connector for the DFIR IRIS service. It is a subclass of Connector. + + :param connector_name: A string that specifies the name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to DFIR IRIS service. + + Returns: + dict: A dictionary containing 'connectionSuccessful' status and 'response' if the connection is successful. 
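+
+        Example (illustrative sketch; assumes a "DFIR-IRIS" connector row with a valid API key is configured):
+            >>> DfirIrisConnector("DFIR-IRIS").verify_connection()
+            {'connectionSuccessful': True}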
+ """ + logger.info( + f"Verifying the dfir-iris connection to {self.attributes['connector_url']}" + ) + try: + headers = { + "Authorization": f"Bearer {self.attributes['connector_api_key']}" + } + dfir_iris = requests.get( + f"{self.attributes['connector_url']}/api/ping", + headers=headers, + verify=False, + ) + # See if 200 is returned + if dfir_iris.status_code == 200: + logger.info( + f"Connection to {self.attributes['connector_url']} successful" + ) + return {"connectionSuccessful": True} + else: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {dfir_iris.text}" + ) + return {"connectionSuccessful": False, "response": None} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False, "response": None} + + +class VelociraptorConnector(Connector): + """ + A connector for the Velociraptor service, a subclass of Connector. + + Args: + connector_name (str): The name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to Velociraptor service. + + Returns: + dict: A dictionary containing 'connectionSuccessful' status and 'response' if the connection is successful. + """ + try: + connector_api_key = self.attributes["connector_api_key"] + + with open(connector_api_key, "r") as f: + api_key = f.read() + + try: + config = pyvelociraptor.LoadConfigFile(connector_api_key) + creds = grpc.ssl_channel_credentials( + root_certificates=config["ca_certificate"].encode("utf8"), + private_key=config["client_private_key"].encode("utf8"), + certificate_chain=config["client_cert"].encode("utf8"), + ) + + options = (("grpc.ssl_target_name_override", "VelociraptorServer"),) + + with grpc.secure_channel( + config["api_connection_string"], creds, options + ) as channel: + stub = api_pb2_grpc.APIStub(channel) + client_query = "SELECT * FROM info()" + + client_request = api_pb2.VQLCollectorArgs( + max_wait=60, + Query=[ + api_pb2.VQLRequest( + Name="ClientQuery", + VQL=client_query, + ), + ], + ) + + r = [] + for response in stub.Query(client_request): + if response.Response: + r = r + json.loads(response.Response) + return {"connectionSuccessful": True} + except Exception as e: + logger.error(f"Failed to verify connection to Velociraptor: {e}") + return {"connectionSuccessful": False, "response": None} + except Exception as e: + logger.error(f"Failed to get connector_api_key from the database: {e}") + return {"connectionSuccessful": False, "response": None} + + +class RabbitMQConnector(Connector): + """ + A connector for the RabbitMQ service, a subclass of Connector. + + Args: + connector_name (str): The name of the connector. + """ + + def __init__(self, connector_name): + super().__init__(attributes=self.get_connector_info_from_db(connector_name)) + + def verify_connection(self): + """ + Verifies the connection to RabbitMQ service. 
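+
+        Returns:
+            dict: A dictionary containing 'connectionSuccessful' status and 'response' if the connection is successful.
+
+        Example (illustrative sketch; assumes connector_url is stored as "host:port", which is how the code below parses it):
+            >>> RabbitMQConnector("RabbitMQ").verify_connection()
+            {'connectionSuccessful': True}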
+ """ + logger.info( + f"Verifying the rabbitmq connection to {self.attributes['connector_url']}" + ) + try: + # For the connector_url, strip out the host and port and use that for the connection + # This is because the connection string is not in the format that pika expects + connector_host, connector_port = self.attributes["connector_url"].split(":") + connector_port = int(connector_port) + + credentials = pika.PlainCredentials( + self.attributes["connector_username"], + self.attributes["connector_password"], + ) + parameters = pika.ConnectionParameters( + connector_host, + connector_port, + credentials=credentials, + ) + connection = pika.BlockingConnection(parameters) + if connection.is_open: + logger.info( + f"Connection to {self.attributes['connector_url']} successful" + ) + return {"connectionSuccessful": True} + else: + logger.error(f"Connection to {self.attributes['connector_url']} failed") + return {"connectionSuccessful": False, "response": None} + except Exception as e: + logger.error( + f"Connection to {self.attributes['connector_url']} failed with error: {e}" + ) + return {"connectionSuccessful": False, "response": None} + + +class ConnectorFactory: + """ + This class represents a factory for creating connector instances. + + :param creators: A dictionary mapping connector keys to their corresponding creator names. + """ + + def __init__(self): + """ + Initialize a new instance of the ConnectorFactory. + """ + self._creators = {} + + def register_creator(self, key, creator): + """ + Register a new connector creator. + + :param key: The key of the connector. + :param creator: The creator of the connector. + """ + self._creators[key] = creator + + def create(self, key, connector_name): + """ + Create a new connector instance. + + :param key: The key of the connector. + :param connector_name: The name of the connector. + + :return: A new instance of the connector. + + :raises ValueError: If the key is not found in the list of creators. 
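+
+        Example (illustrative; the key matches the register_creator() calls at the bottom of this module, and assumes the database row is also named "Wazuh-Indexer"):
+            wazuh_indexer = connector_factory.create("Wazuh-Indexer", "Wazuh-Indexer")
+            wazuh_indexer.verify_connection()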
+ """ + creator = self._creators.get(key) + if not creator: + raise ValueError(key) + # use dynamic_import to get the class and initialize it + connector_class = dynamic_import("app.models.connectors", creator) + return connector_class(connector_name) + + +# Instantiate factory +connector_factory = ConnectorFactory() + + +# Register connector creators +connector_factory.register_creator("Wazuh-Indexer", "WazuhIndexerConnector") +connector_factory.register_creator("Graylog", "GraylogConnector") +connector_factory.register_creator("Wazuh-Manager", "WazuhManagerConnector") +connector_factory.register_creator("DFIR-IRIS", "DfirIrisConnector") +connector_factory.register_creator("Velociraptor", "VelociraptorConnector") +connector_factory.register_creator("RabbitMQ", "RabbitMQConnector") +connector_factory.register_creator("Shuffle", "ShuffleConnector") diff --git a/backend/app/models/models.py b/backend/app/models/models.py new file mode 100644 index 00000000..dd5746c8 --- /dev/null +++ b/backend/app/models/models.py @@ -0,0 +1,311 @@ +from datetime import datetime + +from loguru import logger +from sqlalchemy.dialects.postgresql import JSONB # Add this line + +from app import db +from app import ma + + +class ConnectorsAvailable(db.Model): + id = db.Column(db.Integer, primary_key=True) + connector_name = db.Column(db.String(100), unique=True) + connector_description = db.Column(db.String(100)) + connector_supports = db.Column(db.String(100)) + connector_configured = db.Column(db.Boolean, default=False) + connector_verified = db.Column(db.Boolean, default=False) + + def __init__(self, connector_name, connector_supports): + self.connector_name = connector_name + self.connector_supports = connector_supports + + def __repr__(self): + return f"" + + +class ConnectorsAvailableSchema(ma.Schema): + class Meta: + fields = ( + "id", + "connector_name", + "connector_description", + "connector_supports", + "connector_configured", + "connector_verified", + ) + + +connector_available_schema = ConnectorsAvailableSchema() +connectors_available_schema = ConnectorsAvailableSchema(many=True) + + +# Class for the connector which will store the endpoint url, connector name, connector type, connector last updated, +# username and password +class Connectors(db.Model): + id = db.Column(db.Integer, primary_key=True) + connector_name = db.Column(db.String(100), unique=True) + connector_type = db.Column(db.String(100)) + connector_url = db.Column(db.String(100)) + connector_last_updated = db.Column(db.DateTime, default=datetime.utcnow) + connector_username = db.Column(db.String(100)) + connector_password = db.Column(db.String(100)) + connector_api_key = db.Column(db.String(100)) + + def __init__( + self, + connector_name, + connector_type, + connector_url, + connector_username, + connector_password, + connector_api_key, + ): + self.connector_name = connector_name + self.connector_type = connector_type + self.connector_url = connector_url + self.connector_username = connector_username + self.connector_password = connector_password + # If the `connector_name` is `shuffle` or `dfir-irs` then set the `connector_api_key`. 
Otherwise set it to + # `None` + if ( + connector_name.lower() == "shuffle" + or connector_name.lower() == "dfir-irs" + or connector_name.lower() == "velociraptor" + ): + logger.info(f"Setting the API key for {connector_name}") + self.connector_api_key = connector_api_key + else: + logger.info(f"Not setting the API key for {connector_name}") + self.connector_api_key = None + + def __repr__(self): + return f"" + + +class ConnectorsSchema(ma.Schema): + class Meta: + fields = ( + "id", + "connector_name", + "connector_type", + "connector_url", + "connector_last_updated", + "connector_username", + "connector_password", + "connector_api_key", + ) + + +connector_schema = ConnectorsSchema() +connectors_schema = ConnectorsSchema(many=True) + + +# Class for the disabled rule IDs which will store the rule ID, previous configuration, new configuration, reason for +# disabling, date disabled, and the length of time the rule will be disabled for +# Path: backend\app\models.py +# class DisabledRules(db.Model): +# id = db.Column(db.Integer, primary_key=True) +# rule_id = db.Column(db.String(100)) +# previous_level = db.Column(db.String(1000)) +# new_level = db.Column(db.String(1000)) +# reason_for_disabling = db.Column(db.String(100)) +# date_disabled = db.Column(db.DateTime, default=datetime.utcnow) +# length_of_time = db.Column(db.Integer) + +# def __init__( +# self, +# rule_id, +# previous_level, +# new_level, +# reason_for_disabling, +# length_of_time, +# ): +# self.rule_id = rule_id +# self.previous_level = previous_level +# self.new_level = new_level +# self.reason_for_disabling = reason_for_disabling +# self.length_of_time = length_of_time + +# def __repr__(self): +# return f"" + + +# class DisabledRulesSchema(ma.Schema): +# class Meta: +# fields = ( +# "id", +# "rule_id", +# "previous_level", +# "new_level", +# "reason_for_disabling", +# "date_disabled", +# "length_of_time", +# ) + + +# disabled_rule_schema = DisabledRulesSchema() +# disabled_rules_schema = DisabledRulesSchema(many=True) + + +# Class for Wazuh Indexer allocation which stores disk stats and the host. +# Generate timestamp for each entry and invoke every 5 minutes. +# Path: backend\app\models.py +class WazuhIndexerAllocation(db.Model): + id = db.Column(db.Integer, primary_key=True) + node = db.Column(db.String(100)) + disk_used = db.Column(db.Float) + disk_available = db.Column(db.Float) + disk_total = db.Column(db.Float) + disk_percent = db.Column(db.Float) + timestamp = db.Column(db.DateTime, default=datetime.utcnow) + + def __init__( + self, + node, + disk_used, + disk_available, + disk_total, + disk_percent, + ): + self.node = node + self.disk_used = disk_used + self.disk_available = disk_available + self.disk_total = disk_total + self.disk_percent = disk_percent + + def __repr__(self): + return f"" + + +class WazuhIndexerAllocationSchema(ma.Schema): + class Meta: + fields = ( + "id", + "node", + "disk_used", + "disk_available", + "disk_total", + "disk_percent", + "timestamp", + ) + + +wazuh_indexer_allocation_schema = WazuhIndexerAllocationSchema() +wazuh_indexer_allocations_schema = WazuhIndexerAllocationSchema(many=True) + + +# Class for Graylog allocation which stores throughput metrics +# Generate timestamp for each entry and invoke every 5 minutes. 
+# Path: backend\app\models.py +class GraylogMetricsAllocation(db.Model): + id = db.Column(db.Integer, primary_key=True) + input_usage = db.Column(db.Float) + output_usage = db.Column(db.Float) + processor_usage = db.Column(db.Float) + input_1_sec_rate = db.Column(db.Float) + output_1_sec_rate = db.Column(db.Float) + total_input = db.Column(db.Float) + total_output = db.Column(db.Float) + timestamp = db.Column(db.DateTime, default=datetime.utcnow) + + def __init__( + self, + input_usage, + output_usage, + processor_usage, + input_1_sec_rate, + output_1_sec_rate, + total_input, + total_output, + ): + self.input_usage = input_usage + self.output_usage = output_usage + self.processor_usage = processor_usage + self.input_1_sec_rate = input_1_sec_rate + self.output_1_sec_rate = output_1_sec_rate + self.total_input = total_input + self.total_output = total_output + + def __repr__(self): + return f"" + + +class GraylogMetricsAllocationSchema(ma.Schema): + class Meta: + fields = ( + "id", + "input_usage", + "output_usage", + "processor_usage", + "input_1_sec_rate", + "output_1_sec_rate", + "total_input", + "total_output", + "timestamp", + ) + + +graylog_metrics_allocation_schema = GraylogMetricsAllocationSchema() +graylog_metrics_allocations_schema = GraylogMetricsAllocationSchema(many=True) + + +# Class for cases which stores the case ID, case name, list of agents +# Path: backend\app\models.py +class Case(db.Model): + id = db.Column(db.Integer, primary_key=True) + case_id = db.Column(db.Integer) + case_name = db.Column(db.String(100)) + agents = db.Column(db.String(1000)) + + def __init__(self, case_id, case_name, agents): + self.case_id = case_id + self.case_name = case_name + self.agents = agents + + def __repr__(self): + return f"" + + +class CaseSchema(ma.Schema): + class Meta: + fields = ( + "id", + "case_id", + "case_name", + "agents", + ) + + +case_schema = CaseSchema() +cases_schema = CaseSchema(many=True) + + +# Class for artifacts collected which stores the artifact name, artificat results (json), hostname +# Path: backend\app\models.py +class Artifact(db.Model): + id = db.Column(db.Integer, primary_key=True) + artifact_name = db.Column(db.String(100)) + artifact_results = db.Column(JSONB) + hostname = db.Column(db.String(100)) + + def __init__(self, artifact_name, artifact_results, hostname): + self.artifact_name = artifact_name + self.artifact_results = artifact_results + self.hostname = hostname + + def __repr__(self): + return f"" + + +class ArtifactSchema(ma.Schema): + class Meta: + fields = ( + "id", + "artifact_name", + "artifact_results", + "hostname", + ) + + +artifact_schema = ArtifactSchema() +artifacts_schema = ArtifactSchema(many=True) diff --git a/backend/app/models/rules.py b/backend/app/models/rules.py new file mode 100644 index 00000000..4955e695 --- /dev/null +++ b/backend/app/models/rules.py @@ -0,0 +1,54 @@ +from datetime import datetime + +from loguru import logger +from sqlalchemy.dialects.postgresql import JSONB # Add this line + +from app import db +from app import ma + + +# Class for the disabled rule IDs which will store the rule ID, previous configuration, new configuration, reason for +# disabling, date disabled, and the length of time the rule will be disabled for +# Path: backend\app\rules.py +class DisabledRules(db.Model): + id = db.Column(db.Integer, primary_key=True) + rule_id = db.Column(db.String(100)) + previous_level = db.Column(db.String(1000)) + new_level = db.Column(db.String(1000)) + reason_for_disabling = db.Column(db.String(100)) + 
date_disabled = db.Column(db.DateTime, default=datetime.utcnow)
+    length_of_time = db.Column(db.Integer)
+
+    def __init__(
+        self,
+        rule_id,
+        previous_level,
+        new_level,
+        reason_for_disabling,
+        length_of_time,
+    ):
+        self.rule_id = rule_id
+        self.previous_level = previous_level
+        self.new_level = new_level
+        self.reason_for_disabling = reason_for_disabling
+        self.length_of_time = length_of_time
+
+    def __repr__(self):
+        return f"<DisabledRules {self.rule_id}>"
+
+
+class DisabledRulesSchema(ma.Schema):
+    class Meta:
+        fields = (
+            "id",
+            "rule_id",
+            "previous_level",
+            "new_level",
+            "reason_for_disabling",
+            "date_disabled",
+            "length_of_time",
+        )
+
+
+disabled_rule_schema = DisabledRulesSchema()
+disabled_rules_schema = DisabledRulesSchema(many=True)
diff --git a/backend/app/routes/agents.py b/backend/app/routes/agents.py
new file mode 100644
index 00000000..e19e971c
--- /dev/null
+++ b/backend/app/routes/agents.py
@@ -0,0 +1,137 @@
+from flask import Blueprint, jsonify, request
+from loguru import logger
+from app.models.connectors import Connector, WazuhManagerConnector
+
+from app.services.agents.agents import AgentService, AgentSyncService
+
+from app.services.WazuhManager.universal import UniversalService
+from app.services.WazuhManager.agent import WazuhManagerAgentService
+from app.services.WazuhManager.vulnerability import VulnerabilityService
+
+
+
+bp = Blueprint("agents", __name__)
+
+
+@bp.route("/agents", methods=["GET"])
+def get_agents():
+    """
+    Endpoint to list all available agents.
+    It processes each agent to verify the connection and returns the results.
+
+    Returns:
+        json: A JSON response containing the list of all available agents along with their connection verification status.
+    """
+    service = AgentService()
+    agents = service.get_all_agents()
+    return agents
+
+
+@bp.route("/agents/<agent_id>", methods=["GET"])
+def get_agent(agent_id):
+    """
+    Endpoint to get the details of an agent.
+
+    Args:
+        agent_id (str): The id of the agent to be fetched.
+
+    Returns:
+        json: A JSON response containing the details of the agent.
+    """
+    service = AgentService()
+    agent = service.get_agent(agent_id)
+    return agent
+
+
+@bp.route("/agents/<agent_id>/critical", methods=["POST"])
+def mark_as_critical(agent_id):
+    """
+    Endpoint to mark an agent as critical.
+
+    Args:
+        agent_id (str): The id of the agent to be marked as critical.
+
+    Returns:
+        json: A JSON response containing the updated agent information.
+    """
+    service = AgentService()
+    result = service.mark_agent_as_critical(agent_id)
+    return result
+
+
+@bp.route("/agents/<agent_id>/noncritical", methods=["POST"])
+def unmark_agent_critical(agent_id):
+    """
+    Endpoint to unmark an agent as critical.
+
+    Args:
+        agent_id (str): The id of the agent to be unmarked as critical.
+
+    Returns:
+        json: A JSON response containing the updated agent information.
+    """
+    service = AgentService()
+    result = service.mark_agent_as_non_critical(agent_id)
+    return result
+
+
+@bp.route("/agents/sync", methods=["POST"])
+def sync_agents():
+    """
+    Endpoint to sync all agents.
+
+    Returns:
+        json: A JSON response containing the updated agent information.
+    """
+    service = AgentSyncService()
+    result = service.sync_agents()
+    return jsonify(result)
+
+
+@bp.route("/agents/<agent_id>/delete", methods=["POST"])
+def delete_agent(agent_id):
+    """
+    Endpoint to delete an agent.
+
+    Args:
+        agent_id (str): The id of the agent to be deleted.
+
+    Returns:
+        json: A JSON response containing the result of the deletion.
+    """
+    service = AgentService()
+    result = service.delete_agent_db(agent_id)
+
+    # Delete from WazuhManager
+    # Create instance of UniversalService
+    universal_service = UniversalService()
+
+    # Pass universal_service to WazuhManagerAgentService
+    agent_service = WazuhManagerAgentService(universal_service)
+    agent_deleted = agent_service.delete_agent(agent_id)
+
+    return result
+
+
+@bp.route("/agents/<agent_id>/vulnerabilities", methods=["GET"])
+def get_agent_vulnerabilities(agent_id):
+    """
+    Endpoint to get the vulnerabilities of an agent.
+
+    Args:
+        agent_id (str): The id of the agent to be fetched.
+
+    Returns:
+        json: A JSON response containing the vulnerabilities of the agent.
+    """
+    # Create instance of UniversalService
+    universal_service = UniversalService()
+
+    # Pass universal_service to VulnerabilityService
+    vulnerability_service = VulnerabilityService(universal_service)
+
+    agent_vulnerabilities = vulnerability_service.agent_vulnerabilities(agent_id)
+    return agent_vulnerabilities
diff --git a/backend/app/routes/alerts.py b/backend/app/routes/alerts.py
new file mode 100644
index 00000000..4c87dd88
--- /dev/null
+++ b/backend/app/routes/alerts.py
@@ -0,0 +1,22 @@
+from flask import Blueprint, jsonify, request
+from loguru import logger
+from app.models.connectors import Connector, WazuhManagerConnector
+
+from app.services.agents.agents import AgentService, AgentSyncService
+from app.services.WazuhIndexer.alerts import AlertsService
+
+bp = Blueprint("alerts", __name__)
+
+
+@bp.route("/alerts", methods=["GET"])
+def get_alerts():
+    """
+    Endpoint to list all available alerts.
+    It processes each alert to verify the connection and returns the results.
+
+    Returns:
+        json: A JSON response containing the list of all available alerts along with their connection verification status.
+    """
+    service = AlertsService()
+    alerts = service.collect_alerts()
+    return alerts
diff --git a/backend/app/routes/connectors.py b/backend/app/routes/connectors.py
new file mode 100644
index 00000000..80f1adf3
--- /dev/null
+++ b/backend/app/routes/connectors.py
@@ -0,0 +1,99 @@
+from flask import Blueprint, jsonify, request
+from loguru import logger
+from app.models.models import (
+    ConnectorsAvailable,
+    Connectors,
+    connectors_available_schema,
+)
+
+from app.services.connectors.connectors import ConnectorService
+from app import db
+
+bp = Blueprint("connectors", __name__)
+
+
+@bp.route("/connectors", methods=["GET"])
+def list_connectors_available():
+    """
+    Endpoint to list all available connectors.
+    It processes each connector to verify the connection and returns the results.
+
+    Returns:
+        json: A JSON response containing the list of all available connectors along with their connection verification status.
+    """
+    connectors_service = ConnectorService(db)
+    connectors = ConnectorsAvailable.query.all()
+    result = connectors_available_schema.dump(connectors)
+
+    instantiated_connectors = [
+        connectors_service.process_connector(connector["connector_name"])
+        for connector in result
+        if connectors_service.process_connector(connector["connector_name"])
+    ]
+
+    return jsonify(instantiated_connectors)
+
+
+@bp.route("/connectors/<id>", methods=["GET"])
+def get_connector_details(id):
+    """
+    Endpoint to get the details of a connector.
+
+    Args:
+        id (str): The id of the connector to be fetched.
+
+    Returns:
+        json: A JSON response containing the details of the connector.
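+
+    Example (illustrative; the id value is an assumption):
+        GET /connectors/1 returns the stored connector details together with its connection verification status.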
+ """ + # Call service function instead of direct function call + service = ConnectorService(db) + connector_validated = service.validate_connector_exists( + int(id) + ) # convert id to integer + logger.info(connector_validated) + if connector_validated["success"] == False: + return jsonify(connector_validated), 404 + + # Fetch connector using the ID + connector = Connectors.query.get(id) + # Call service function instead of direct function call + instantiated_connector = service.process_connector(connector.connector_name) + return jsonify(instantiated_connector) + + +@bp.route("/connectors/", methods=["PUT"]) +def update_connector_route(id): + """ + Endpoint to update a connector. + + Args: + id (str): The id of the connector to be updated. + + Returns: + json: A JSON response containing the success status of the update operation and a message indicating the status. If the update operation was successful, it returns the connector name and the status of the connection verification. + """ + api_key_connector = ["Shuffle", "DFIR-IRIS", "Velociraptor"] + + request_data = request.get_json() + service = ConnectorService(db) + connector_validated = service.validate_connector_exists( + int(id) + ) # convert id to integer + logger.info(connector_validated) + if connector_validated["success"] == False: + return jsonify(connector_validated), 404 + + if connector_validated["connector_name"] in api_key_connector: + data_validated = service.validate_request_data_api_key(request_data) + if data_validated["success"] == False: + return jsonify(data_validated), 400 + else: + service.update_connector(int(id), request_data) + return service.verify_connector_connection(int(id)) + + data_validated = service.validate_request_data(request_data) + if data_validated["success"] == False: + return jsonify(data_validated), 400 + + service.update_connector(int(id), request_data) + return service.verify_connector_connection(int(id)) diff --git a/backend/app/routes/dfir_iris.py b/backend/app/routes/dfir_iris.py new file mode 100644 index 00000000..af77f954 --- /dev/null +++ b/backend/app/routes/dfir_iris.py @@ -0,0 +1,109 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.Graylog.messages import MessagesService +from app.services.Graylog.metrics import MetricsService +from app.services.Graylog.index import IndexService +from app.services.Graylog.inputs import InputsService +from app.services.DFIR_IRIS.cases import CasesService +from app.services.DFIR_IRIS.notes import NotesService +from app.services.DFIR_IRIS.assets import AssetsService +from app.services.DFIR_IRIS.alerts import AlertsService + +bp = Blueprint("dfir_iris", __name__) + + +@bp.route("/dfir_iris/cases", methods=["GET"]) +def get_cases(): + """ + Endpoint to collect cases from DFIR IRIS. + + Returns: + json: A JSON response containing the list of all the messages. + """ + service = CasesService() + cases = service.list_cases() + return cases + +@bp.route("/dfir_iris/cases/", methods=["GET"]) +def get_case(case_id): + """ + Endpoint to collect a specific case from DFIR IRIS. + + Returns: + json: A JSON response containing the list of all the messages. 
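+
+    Example (illustrative; the case id value is an assumption):
+        GET /dfir_iris/cases/1 returns the DFIR-IRIS case with id 1, or an error payload if that id does not exist.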
+ """ + # Get the Case ID from the URL + service = CasesService() + case_id_exists = service.check_case_id(case_id=case_id) + if case_id_exists["success"] == False: + return case_id_exists + case = service.get_case(case_id=case_id) + return case + +@bp.route("/dfir_iris/cases//notes", methods=["GET"]) +def get_case_notes(case_id): + """ + Endpoint to collect notes from a specific case from DFIR IRIS. + + Returns: + json: A JSON response containing the list of all the messages. + """ + # Get the Case ID from the URL + case_service = CasesService() + notes_service = NotesService() + search_term = "%" + case_id_exists = case_service.check_case_id(case_id=case_id) + if case_id_exists["success"] == False: + return case_id_exists + notes = notes_service.get_case_notes(search_term=search_term, cid=int(case_id)) + return notes + +@bp.route("/dfir_iris/cases//note", methods=["POST"]) +def create_case_note(case_id): + """ + Endpoint to create notes from a specific case from DFIR IRIS. + + Returns: + json: A JSON response containing the list of all the messages. + """ + # Get the Case ID from the URL + note_title = request.json["note_title"] + note_content = request.json["note_content"] + case_service = CasesService() + notes_service = NotesService() + case_id_exists = case_service.check_case_id(case_id=case_id) + if case_id_exists["success"] == False: + return case_id_exists + created_note = notes_service.create_case_note(cid=int(case_id), note_title=note_title, note_content=note_content) + return created_note + +@bp.route("/dfir_iris/cases//assets", methods=["GET"]) +def get_case_assets(case_id): + """ + Endpoint to collect assets from a specific case from DFIR IRIS. + + Returns: + json: A JSON response containing the list of all the messages. + """ + asset_service = AssetsService() + case_service = CasesService() + + case_id_exists = case_service.check_case_id(case_id=case_id) + if case_id_exists["success"] == False: + return case_id_exists + assets = asset_service.get_case_assets(cid=int(case_id)) + return assets + +@bp.route("/dfir_iris/alerts", methods=["GET"]) +def get_alerts(): + """ + Endpoint to collect alerts from DFIR-IRIS + + Returns: + json: A JSON response containing the list of all the messages. + """ + service = AlertsService() + alerts = service.list_alerts() + return alerts diff --git a/backend/app/routes/graylog.py b/backend/app/routes/graylog.py new file mode 100644 index 00000000..b61463c0 --- /dev/null +++ b/backend/app/routes/graylog.py @@ -0,0 +1,85 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.Graylog.messages import MessagesService +from app.services.Graylog.metrics import MetricsService +from app.services.Graylog.index import IndexService +from app.services.Graylog.inputs import InputsService +from app.services.WazuhManager.wazuhmanager import WazuhManagerService + +bp = Blueprint("graylog", __name__) + + +@bp.route("/graylog/messages", methods=["GET"]) +def get_messages(): + """ + Endpoint to collect the latest 10 messages from Graylog. + + Returns: + json: A JSON response containing the list of all the messages. + """ + service = MessagesService() + messages = service.collect_messages() + return messages + + +@bp.route("/graylog/metrics", methods=["GET"]) +def get_metrics(): + """ + Endpoint to collect Graylog metrics. 
+ + Returns: + json: A JSON response containing the list of all metrics + """ + service = MetricsService() + uncommitted_journal_size = service.collect_uncommitted_journal_size() + metrics = service.collect_throughput_metrics() + return jsonify( + {"uncommitted_journal_size": uncommitted_journal_size, "metrics": metrics} + ) + + +@bp.route("/graylog/indices", methods=["GET"]) +def get_indices(): + """ + Endpoint to collect Graylog indices. + + Returns: + json: A JSON response containing the list of all indices + """ + service = IndexService() + indices = service.collect_indices() + return indices + + +@bp.route("/graylog/indices//delete", methods=["DELETE"]) +def delete_index(index_name): + """ + Endpoint to delete a Graylog index. + + Args: + index_name (str): The name of the index to be deleted. + + Returns: + json: A JSON response containing the result of the deletion. + """ + service = IndexService() + result = service.delete_index(index_name) + return result + + +@bp.route("/graylog/inputs", methods=["GET"]) +def get_inputs(): + """ + Endpoint to collect Graylog inputs. + + Returns: + json: A JSON response containing the list of all inputs + """ + service = InputsService() + running_inputs = service.collect_running_inputs() + configured_inputs = service.collect_configured_inputs() + return jsonify( + {"running_inputs": running_inputs, "configured_inputs": configured_inputs} + ) diff --git a/backend/app/routes/index.py b/backend/app/routes/index.py new file mode 100644 index 00000000..07bf611b --- /dev/null +++ b/backend/app/routes/index.py @@ -0,0 +1,12 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.agents.agents import AgentService, AgentSyncService +from app.services.WazuhIndexer.alerts import AlertsService +from app.services.WazuhIndexer.cluster import ClusterService + +bp = Blueprint("indices", __name__) + + +@bp.route("/indices", methods=["GET"]) diff --git a/backend/app/routes/rules.py b/backend/app/routes/rules.py new file mode 100644 index 00000000..58a2690d --- /dev/null +++ b/backend/app/routes/rules.py @@ -0,0 +1,60 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.models.rules import DisabledRules + +from app.services.WazuhManager.wazuhmanager import WazuhManagerService + +from app.services.WazuhManager.universal import UniversalService + +from app.services.WazuhManager.disabled_rule import DisableRuleService + +from app.services.WazuhManager.enabled_rule import EnableRuleService + +bp = Blueprint("rules", __name__) + + +@bp.route("/rule/disable", methods=["POST"]) +def disable_rule(): + """ + Endpoint to disable a rule. + + Args: + id (str): The id of the rule to be disabled. + + Returns: + json: A JSON response containing the updated rule information. + """ + logger.info("Received request to disable rule") + data = request.get_json() + # wazuh_manager_connector = WazuhManagerConnector("Wazuh-Manager") + # wazuh_manager_service = WazuhManagerService(wazuh_manager_connector) + # result = wazuh_manager_service.disable_rule(data) + # Create instance of UniversalService + universal_service = UniversalService() + disable_service = DisableRuleService(universal_service) + result = disable_service.disable_rule(data) + return result + + +@bp.route("/rule/enable", methods=["POST"]) +def enable_rule(): + """ + Endpoint to enable a rule. 
+ + Args: + id (str): The id of the rule to be enabled. + + Returns: + json: A JSON response containing the updated rule information. + """ + logger.info("Received request to enable rule") + data = request.get_json() + # wazuh_manager_connector = WazuhManagerConnector("Wazuh-Manager") + # wazuh_manager_service = WazuhManagerService(wazuh_manager_connector) + # result = wazuh_manager_service.enable_rule(data) + universal_service = UniversalService() + enable_service = EnableRuleService(universal_service) + result = enable_service.enable_rule(data) + return result diff --git a/backend/app/routes/shuffle.py b/backend/app/routes/shuffle.py new file mode 100644 index 00000000..009d11ee --- /dev/null +++ b/backend/app/routes/shuffle.py @@ -0,0 +1,50 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.agents.agents import AgentService, AgentSyncService +from app.services.Shuffle.workflows import WorkflowsService + +bp = Blueprint("shuffle", __name__) + + +@bp.route("/shuffle/workflows", methods=["GET"]) +def get_workflows(): + """ + Endpoint to list all available Shuffle workflows. + + Returns: + json: A JSON response containing the list of all configured Workflows. + """ + service = WorkflowsService() + workflows = service.collect_workflows() + return workflows + +@bp.route("/shuffle/workflows/executions", methods=["GET"]) +def get_workflows_executions(): + """ + Endpoint to list all available Shuffle workflow execution status. + + Returns: + json: A JSON response containing the list of all configured workflows last execution status. + """ + service = WorkflowsService() + workflow_details = service.collect_workflow_details() + if "workflows" not in workflow_details: + message = "No workflows found" + return jsonify({"message": message, "success": False}), 500 + for workflow in workflow_details["workflows"]: + workflow["status"] = service.collect_workflow_executions_status(workflow["workflow_id"]) + return workflow_details + +@bp.route("/shuffle/workflows/executions/", methods=["GET"]) +def get_workflow_executions(workflow_id): + """ + Endpoint to list execution status of a specified Shuffle workflow. + + Returns: + json: A JSON response containing the last execution status of the specified workflow. + """ + service = WorkflowsService() + workflow_details = service.collect_workflow_executions_status(workflow_id) + return workflow_details diff --git a/backend/app/routes/velociraptor.py b/backend/app/routes/velociraptor.py new file mode 100644 index 00000000..0d5c5d15 --- /dev/null +++ b/backend/app/routes/velociraptor.py @@ -0,0 +1,85 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.agents.agents import AgentService, AgentSyncService +from app.services.Velociraptor.artifacts import ArtifactsService +from app.services.Velociraptor.universal import UniversalService + +bp = Blueprint("velociraptor", __name__) + +@bp.route("/velociraptor/artifacts", methods=["GET"]) +def get_artifacts(): + """ + Endpoint to list all available artifacts. + It processes each artifact to verify the connection and returns the results. + + Returns: + json: A JSON response containing the list of all available artifacts along with their connection verification status. 
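+
+    Example (illustrative; the artifacts returned depend on the connected Velociraptor server):
+        GET /velociraptor/artifacts lists every artifact; the /linux, /windows and /mac routes below return the same list filtered by name prefix.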
+ """ + service = ArtifactsService() + artifacts = service.collect_artifacts() + return artifacts + +@bp.route("/velociraptor/artifacts/linux", methods=["GET"]) +def get_artifacts_linux(): + """ + Endpoint to list all available artifacts. + It processes each artifact to verify the connection and returns the results where the name + begins with `Linux`. + + Returns: + json: A JSON response containing the list of all available artifacts along with their connection verification status. + """ + service = ArtifactsService() + linux_artifacts = service.collect_artifacts_linux() + return linux_artifacts + +@bp.route("/velociraptor/artifacts/windows", methods=["GET"]) +def get_artifacts_windows(): + """ + Endpoint to list all available artifacts. + It processes each artifact to verify the connection and returns the results where the name + begins with `Windows`. + + Returns: + json: A JSON response containing the list of all available artifacts along with their connection verification status. + """ + service = ArtifactsService() + windows_artifacts = service.collect_artifacts_windows() + return windows_artifacts + +@bp.route("/velociraptor/artifacts/mac", methods=["GET"]) +def get_artifacts_mac(): + """ + Endpoint to list all available artifacts. + It processes each artifact to verify the connection and returns the results where the name + begins with `MacOS`. + + Returns: + json: A JSON response containing the list of all available artifacts along with their connection verification status. + """ + service = ArtifactsService() + mac_artifacts = service.collect_artifacts_macos() + return mac_artifacts + +@bp.route("/velociraptor/artifacts/collection", methods=["POST"]) +def collect_artifact(): + """ + Endpoint to collect an artifact. + It collects the artifact name and client name from the request body and returns the results. + + Returns: + json: A JSON response containing the list of all available artifacts along with their connection verification status. + """ + req_data = request.get_json() + artifact_name = req_data["artifact_name"] + client_name = req_data["client_name"] + service = UniversalService() + client_id = service.get_client_id(client_name=client_name)["results"][0]["client_id"] + if client_id is None: + return jsonify({"message": f"{client_name} has not been seen in the last 30 seconds and may not be online with the Velociraptor server.", "success": False}), 500 + + artifact_service = ArtifactsService() + artifact_results = artifact_service.run_artifact_collection(client_id=client_id, artifact=artifact_name) + return artifact_results diff --git a/backend/app/routes/wazuhindexer.py b/backend/app/routes/wazuhindexer.py new file mode 100644 index 00000000..9f926471 --- /dev/null +++ b/backend/app/routes/wazuhindexer.py @@ -0,0 +1,75 @@ +from flask import Blueprint, jsonify, request +from loguru import logger +from app.models.connectors import Connector, WazuhManagerConnector + +from app.services.agents.agents import AgentService, AgentSyncService +from app.services.WazuhIndexer.alerts import AlertsService +from app.services.WazuhIndexer.index import IndexService +from app.services.WazuhIndexer.cluster import ClusterService + +bp = Blueprint("wazuh_indexer", __name__) + + +@bp.route("/wazuh_indexer/indices", methods=["GET"]) +def get_indices_summary(): + """ + Endpoint to list all available indices and collect. 
+    For each index the endpoint returns:
+            {
+                "index": index["index"],
+                "health": index["health"],
+                "docs_count": index["docs.count"],
+                "store_size": index["store.size"],
+                "replica_count": index["rep"],
+            },
+    It processes each index to verify the connection and returns the results.
+
+    Returns:
+        json: A JSON response containing a summary of all available indices.
+    """
+    service = IndexService()
+    indices = service.collect_indices_summary()
+    return indices
+
+@bp.route("/wazuh_indexer/allocation", methods=["GET"])
+def get_node_allocation():
+    """
+    Endpoint to list the disk allocation of each Wazuh-Indexer node.
+    For each node the endpoint returns:
+            {
+                "disk_used": index["disk.used"],
+                "disk_available": index["disk.avail"],
+                "disk_total": index["disk.total"],
+                "disk_percent": index["disk.percent"],
+                "node": index["node"],
+            },
+
+    Returns:
+        json: A JSON response containing the disk allocation of each node.
+    """
+    service = ClusterService()
+    indices = service.collect_node_allocation()
+    return indices
+
+@bp.route("/wazuh_indexer/health", methods=["GET"])
+def get_cluster_health():
+    """
+    Endpoint to collect Wazuh-Indexer cluster health.
+
+    Returns:
+        json: A JSON response containing the Wazuh-Indexer cluster health.
+    """
+    service = ClusterService()
+    indices = service.collect_cluster_health()
+    return indices
+
+@bp.route("/wazuh_indexer/shards", methods=["GET"])
+def get_shards():
+    """
+    Endpoint to collect Wazuh-Indexer shards.
+
+    Returns:
+        json: A JSON response containing the details of the Wazuh-Indexer shards.
+    """
+    service = ClusterService()
+    indices = service.collect_shards()
+    return indices
diff --git a/backend/app/services/DFIR_IRIS/alerts.py b/backend/app/services/DFIR_IRIS/alerts.py
new file mode 100644
index 00000000..f4b41934
--- /dev/null
+++ b/backend/app/services/DFIR_IRIS/alerts.py
@@ -0,0 +1,45 @@
+from typing import Dict
+import requests
+from loguru import logger
+from app.services.DFIR_IRIS.universal import UniversalService
+from dfir_iris_client.helper.utils import assert_api_resp
+from dfir_iris_client.helper.utils import get_data_from_resp
+from dfir_iris_client.alert import Alert
+
+
+class AlertsService:
+    """
+    A service class that encapsulates the logic for pulling alerts from DFIR-IRIS.
+    """
+
+    def __init__(self):
+        self.universal_service = UniversalService("DFIR-IRIS")
+        session_result = self.universal_service.create_session()
+
+        if not session_result['success']:
+            logger.error(session_result['message'])
+            self.iris_session = None
+        else:
+            self.iris_session = session_result['session']
+
+    def list_alerts(self) -> Dict[str, object]:
+        """
+        Lists all alerts from DFIR-IRIS.
+
+        Returns:
+            dict: A dictionary containing the success status, a message and potentially the alerts.
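+
+        Example return value (illustrative shape only; the individual alert
+        fields depend on the DFIR-IRIS instance):
+
+            {"success": True, "message": "...", "results": [...]}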
+ """ + if self.iris_session is None: + return { + "success": False, + "message": "DFIR-IRIS session was not successfully created.", + } + + logger.info("Collecting cases from DFIR-IRIS") + alert = Alert(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, alert.filter_alerts) + + if not result["success"]: + return {"success": False, "message": "Failed to collect cases from DFIR-IRIS"} + + return {"success": True, "message": "Successfully collected cases from DFIR-IRIS", "results": result["data"]} diff --git a/backend/app/services/DFIR_IRIS/assets.py b/backend/app/services/DFIR_IRIS/assets.py new file mode 100644 index 00000000..a1a9a217 --- /dev/null +++ b/backend/app/services/DFIR_IRIS/assets.py @@ -0,0 +1,47 @@ +from typing import Dict +import requests +from loguru import logger +from app.services.DFIR_IRIS.universal import UniversalService +from dfir_iris_client.case import Case +from dfir_iris_client.helper.utils import assert_api_resp +from dfir_iris_client.helper.utils import get_data_from_resp +from dfir_iris_client.session import ClientSession + + +class AssetsService: + """ + A service class that encapsulates the logic for pulling case assets from DFIR-IRIS. + """ + + def __init__(self): + self.universal_service = UniversalService("DFIR-IRIS") + session_result = self.universal_service.create_session() + + if not session_result['success']: + logger.error(session_result['message']) + self.iris_session = None + else: + self.iris_session = session_result['session'] + + def get_case_assets(self, cid: int) -> Dict[str, object]: + """ + Gets a case's assets from DFIR-IRIS + + ARGS: + cid: The case ID to search for + + Returns: + dict: A dictionary containing the success status, a message and potentially the notes of a given case. + """ + if self.iris_session is None: + return {"success": False, "message": "DFIR-IRIS session was not successfully created."} + + logger.info(f"Collecting case {cid} assets from DFIR-IRIS") + case = Case(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.list_assets, cid) + + if not result["success"]: + return {"success": False, "message": "Failed to collect notes from DFIR-IRIS"} + + return result + diff --git a/backend/app/services/DFIR_IRIS/cases.py b/backend/app/services/DFIR_IRIS/cases.py new file mode 100644 index 00000000..dcd9656c --- /dev/null +++ b/backend/app/services/DFIR_IRIS/cases.py @@ -0,0 +1,80 @@ +from typing import Dict +import requests +from loguru import logger +from app.services.DFIR_IRIS.universal import UniversalService +from dfir_iris_client.case import Case +from dfir_iris_client.helper.utils import assert_api_resp +from dfir_iris_client.helper.utils import get_data_from_resp +from dfir_iris_client.session import ClientSession + + +class CasesService: + """ + A service class that encapsulates the logic for pulling cases from DFIR-IRIS. + """ + + def __init__(self): + self.universal_service = UniversalService("DFIR-IRIS") + session_result = self.universal_service.create_session() + + if not session_result['success']: + logger.error(session_result['message']) + self.iris_session = None + else: + self.iris_session = session_result['session'] + + def list_cases(self) -> Dict[str, object]: + """ + Lists all cases from DFIR-IRIS + + Returns: + dict: A dictionary containing the success status, a message and potentially the cases. 
+ """ + if self.iris_session is None: + return { + "success": False, + "message": "DFIR-IRIS session was not successfully created.", + } + + logger.info("Collecting cases from DFIR-IRIS") + case = Case(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.list_cases) + + if not result["success"]: + return {"success": False, "message": "Failed to collect cases from DFIR-IRIS"} + + return {"success": True, "message": "Successfully collected cases from DFIR-IRIS", "cases": result["data"]} + + def get_case(self, case_id: int) -> bool: + """ + Gets a case from DFIR-IRIS and returns all the details + + Returns: + dict: A dictionary containing the success status, a message and potentially the case. + """ + if self.iris_session is None: + return {"success": False, "message": "DFIR-IRIS session was not successfully created."} + + logger.info(f"Collecting case {case_id} from DFIR-IRIS") + case = Case(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.get_case, case_id) + + if not result["success"]: + return {"success": False, "message": f"Failed to collect case {case_id} from DFIR-IRIS"} + + return {"success": True, "message": f"Successfully collected case {case_id} from DFIR-IRIS", "case": result["data"]} + + def check_case_id(self, case_id: int) -> bool: + """ + Checks if a case exists in DFIR-IRIS + + Returns: + dict: A dictionary containing the success status, a message and potentially the case. + """ + return self.get_case(case_id) + + + + + + diff --git a/backend/app/services/DFIR_IRIS/notes.py b/backend/app/services/DFIR_IRIS/notes.py new file mode 100644 index 00000000..7018eba4 --- /dev/null +++ b/backend/app/services/DFIR_IRIS/notes.py @@ -0,0 +1,108 @@ +from typing import Dict +import requests +from loguru import logger +from app.services.DFIR_IRIS.universal import UniversalService +from dfir_iris_client.case import Case +from dfir_iris_client.helper.utils import assert_api_resp +from dfir_iris_client.helper.utils import get_data_from_resp +from dfir_iris_client.session import ClientSession + + +class NotesService: + """ + A service class that encapsulates the logic for pulling case notes from DFIR-IRIS. + """ + + def __init__(self): + self.universal_service = UniversalService("DFIR-IRIS") + session_result = self.universal_service.create_session() + + if not session_result['success']: + logger.error(session_result['message']) + self.iris_session = None + else: + self.iris_session = session_result['session'] + + def get_case_notes(self, search_term: str, cid: int) -> Dict[str, object]: + """ + Gets a case's notes from DFIR-IRIS and return the ID and Title + + ARGS: + cid: The case ID to search for + search_term: The search term to use + + Returns: + dict: A dictionary containing the success status, a message and potentially the notes of a given case. 
+ """ + if self.iris_session is None: + return {"success": False, "message": "DFIR-IRIS session was not successfully created."} + + logger.info(f"Collecting case {cid} from DFIR-IRIS") + case = Case(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.search_notes, search_term, cid) + + if not result["success"]: + return {"success": False, "message": "Failed to collect notes from DFIR-IRIS"} + + # Loop through the notes and get the details + for note in result['data']: + note_details = self._get_case_note_details(note['note_id'], cid) + if not note_details['success']: + return {"success": False, "message": "Failed to collect notes from DFIR-IRIS"} + note['note_details'] = note_details['notes'] + + return result + + def _get_case_note_details(self, note_id: int, cid: int) -> Dict[str, object]: + """ + Gets a case's notes from DFIR-IRIS and returns the note details such as the content + + ARGS: + cid: The case ID to search for + note_id: The note ID to search for + + Returns: + dict: A dictionary containing the success status, a message and potentially the notes of a given case. + """ + if self.iris_session is None: + return {"success": False, "message": "DFIR-IRIS session was not successfully created."} + + logger.info(f"Collecting case {cid} from DFIR-IRIS") + case = Case(session=self.iris_session) + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.get_note, note_id, cid) + + if not result["success"]: + return {"success": False, "message": "Failed to collect notes from DFIR-IRIS"} + + return {"success": True, "message": "Successfully collected notes from DFIR-IRIS", "notes": result["data"]} + + def create_case_note(self, cid: int, note_title: str, note_content: str) -> Dict[str, object]: + """ + Creates a case note in DFIR-IRIS + + ARGS: + cid: The case ID to search for + title: The title of the note + content: The content of the note + + Returns: + dict: A dictionary containing the success status, a message and potentially the notes of a given case. 
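+
+        Example (illustrative; assumes an existing DFIR-IRIS case with ID 1):
+
+            NotesService().create_case_note(cid=1, note_title="Triage", note_content="Initial findings")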
+ """ + if self.iris_session is None: + return {"success": False, "message": "DFIR-IRIS session was not successfully created."} + + logger.info(f"Creating case {cid} note in DFIR-IRIS") + case = Case(session=self.iris_session) + # Creating Group for New Note + note_group = self.universal_service.fetch_and_parse_data(self.iris_session, case.add_notes_group, note_title, cid) + + if not note_group["success"]: + return {"success": False, "message": "Failed to create note in DFIR-IRIS"} + note_group_id = note_group['data']['group_id'] + custom_attributes = {} + result = self.universal_service.fetch_and_parse_data(self.iris_session, case.add_note, note_title, note_content, note_group_id, custom_attributes, cid) + + if not result["success"]: + return {"success": False, "message": "Failed to create note in DFIR-IRIS"} + + return {"success": True, "message": "Successfully created note in DFIR-IRIS", "notes": result["data"]} diff --git a/backend/app/services/DFIR_IRIS/universal.py b/backend/app/services/DFIR_IRIS/universal.py new file mode 100644 index 00000000..8e91ef2a --- /dev/null +++ b/backend/app/services/DFIR_IRIS/universal.py @@ -0,0 +1,104 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from elasticsearch7 import Elasticsearch +from app.models.connectors import connector_factory, Connector +import dfir_iris_client +from dfir_iris_client.session import ClientSession +from typing import Any +from typing import Dict +from typing import Optional +from typing import Set +from typing import Tuple +from dfir_iris_client.case import Case +from dfir_iris_client.helper.utils import assert_api_resp +from dfir_iris_client.helper.utils import get_data_from_resp + + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from DFIR-IRIS. + """ + + def __init__(self, connector_name: str) -> None: + self.connector_url, self.connector_api_key = self.collect_iris_details(connector_name) + + def collect_iris_details(self, connector_name: str): + """ + Collects the details of the DFIR-IRIS connector. + + Args: + connector_name (str): The name of the DFIR-IRIS connector. + + Returns: + tuple: A tuple containing the connection URL, and api key. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_api_key"), + ) + else: + return None, None + + def create_session(self) -> Optional[ClientSession]: + """ + Create a session with DFIR-IRIS. + + This method creates a session with DFIR-IRIS and returns the session object. + If a session cannot be established, an error is logged and None is returned. + + Returns: + session: A session object for DFIR-IRIS. 
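+
+        The session is wrapped in a result dictionary, for example:
+
+            On success: {"success": True, "session": <ClientSession>}
+            On failure: {"success": False, "message": "Connection to DFIR-IRIS unsuccessful."}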
+ """ + try: + logger.info("Creating session with DFIR-IRIS.") + session = ClientSession( + host=self.connector_url, + apikey=self.connector_api_key, + agent="iris-client", + ssl_verify=False, + timeout=120, + proxy=None, + ) + logger.info("Session created.") + return {"success": True, "session": session} + except Exception as e: + logger.error(f"Error creating session with DFIR-IRIS: {e}") + return { + "success": False, + "message": "Connection to DFIR-IRIS unsuccessful.", + } + + def fetch_and_parse_data(self, session, action, *args): + """ + General method to fetch and parse data from DFIR-IRIS. + + Args: + session: ClientSession object. + action: callable, the action to be performed (e.g., list_cases or get_case) + args: arguments for the action callable + + Returns: + dict: A dictionary containing the data and a success status. + """ + try: + logger.info(f"Executing {action.__name__}... on args: {args}") + status = action(*args) + assert_api_resp(status, soft_fail=False) + data = get_data_from_resp(status) + logger.info(f"Successfully executed {action.__name__}") + return {"success": True, "data": data} + except Exception as err: + logger.error(f"Failed to execute {action.__name__}: {err}") + return {"success": False} diff --git a/backend/app/services/Graylog/index.py b/backend/app/services/Graylog/index.py new file mode 100644 index 00000000..8b990779 --- /dev/null +++ b/backend/app/services/Graylog/index.py @@ -0,0 +1,148 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, GraylogConnector +from app.services.Graylog.universal import UniversalService + + +class IndexService: + """ + A service class that encapsulates the logic for pulling index data from Graylog + """ + + HEADERS: Dict[str, str] = {"X-Requested-By": "CoPilot"} + + def __init__(self): + ( + self.connector_url, + self.connector_username, + self.connector_password, + ) = UniversalService().collect_graylog_details("Graylog") + + def collect_indices(self): + """ + Collects the indices that are managed by Graylog. + + Returns: + list: A list containing the indices. + """ + if ( + self.connector_url is None + or self.connector_username is None + or self.connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + + managed_indices = self._collect_managed_indices() + + if managed_indices["success"]: + index_names = self._extract_index_names(managed_indices) + managed_indices["index_names"] = index_names + + return managed_indices + + def _collect_managed_indices(self) -> Dict[str, object]: + """ + Collects the indices that are managed by Graylog. + + Returns: + dict: A dictionary containing the success status, a message and potentially the indices. 
+ """ + try: + managed_indices = requests.get( + f"{self.connector_url}/api/system/indexer/indices", + headers=self.HEADERS, + auth=(self.connector_username, self.connector_password), + verify=False, + ) + return { + "message": "Successfully collected managed indices", + "success": True, + "indices": managed_indices.json()["all"]["indices"], + } + except Exception as e: + logger.error(f"Failed to collect managed indices: {e}") + return {"message": "Failed to collect managed indices", "success": False} + + def _extract_index_names(self, response: Dict[str, object]) -> List[str]: + """ + Extracts index names from the provided response. + + Args: + response (dict): The dictionary containing the response. + + Returns: + list: A list containing the index names. + """ + index_names = list(response.get("indices", {}).keys()) + return index_names + + def delete_index(self, index_name: str) -> Dict[str, object]: + """ + Deletes the specified index from Graylog. + + Args: + index_name (str): The name of the index to delete. + + Returns: + dict: A dictionary containing the response. + """ + logger.info(f"Deleting index {index_name} from Graylog") + if ( + self.connector_url is None + or self.connector_username is None + or self.connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + + # Check if the index exists in Graylog + managed_indices = self._collect_managed_indices() + if managed_indices["success"]: + index_names = self._extract_index_names(managed_indices) + if index_name not in index_names: + return { + "message": f"Index {index_name} is not managed by Graylog", + "success": False, + } + # Invoke _delete_index + return self._delete_index(index_name) + + return { + "message": f"Failed to delete index {index_name} from Graylog", + "success": False, + } + + def _delete_index(self, index_name: str) -> Dict[str, object]: + """ + Deletes the specified index from Graylog. + + Args: + index_name (str): The name of the index to delete. + + Returns: + dict: A dictionary containing the response. + """ + try: + delete_index_response = requests.delete( + f"{self.connector_url}/api/system/indexer/indices/{index_name}", + headers=self.HEADERS, + auth=(self.connector_username, self.connector_password), + verify=False, + ) + return { + "message": f"Successfully deleted index {index_name} from Graylog", + "success": True, + } + except Exception as e: + logger.error(f"Failed to delete index {index_name} from Graylog: {e}") + return { + "message": f"Failed to delete index {index_name} from Graylog. 
If this is the current index, it cannot be deleted.", + "success": False, + } diff --git a/backend/app/services/Graylog/inputs.py b/backend/app/services/Graylog/inputs.py new file mode 100644 index 00000000..24fd4283 --- /dev/null +++ b/backend/app/services/Graylog/inputs.py @@ -0,0 +1,128 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, GraylogConnector +from app.services.Graylog.universal import UniversalService + + +class InputsService: + """ + A service class that encapsulates the logic for pulling index data from Graylog + """ + + HEADERS: Dict[str, str] = {"X-Requested-By": "CoPilot"} + + def __init__(self): + ( + self.connector_url, + self.connector_username, + self.connector_password, + ) = UniversalService().collect_graylog_details("Graylog") + + def collect_running_inputs(self): + """ + Collects the running inputs that are managed by Graylog. + + Returns: + list: A list containing the inputs. + """ + if ( + self.connector_url is None + or self.connector_username is None + or self.connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + + running_inputs = self._collect_running_inputs() + + if running_inputs["success"]: + return running_inputs + + def _collect_running_inputs(self) -> Dict[str, object]: + """ + Collects the running inputs that are managed by Graylog. + + Returns: + dict: A dictionary containing the success status, a message and potentially the inputs. + """ + try: + running_inputs = requests.get( + f"{self.connector_url}/api/system/inputstates", + headers=self.HEADERS, + auth=(self.connector_username, self.connector_password), + verify=False, + ) + inputs_list = [] + for input in running_inputs.json()["states"]: + inputs_list.append( + { + "state": input["state"], + "title": input["message_input"]["title"], + "port": input["message_input"]["attributes"]["port"], + }, + ) + return { + "message": "Successfully collected running inputs", + "success": True, + "inputs": inputs_list, + } + except Exception as e: + logger.error(f"Failed to collect running inputs: {e}") + return {"message": "Failed to collect running inputs", "success": False} + + def collect_configured_inputs(self): + """ + Collects the configured inputs that are managed by Graylog. + + Returns: + list: A list containing the inputs. + """ + if ( + self.connector_url is None + or self.connector_username is None + or self.connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + + configured_inputs = self._collect_configured_inputs() + + if configured_inputs["success"]: + return configured_inputs + + def _collect_configured_inputs(self) -> Dict[str, object]: + """ + Collects the configured inputs that are managed by Graylog. + + Returns: + dict: A dictionary containing the success status, a message and potentially the inputs. 
+ """ + try: + configured_inputs = requests.get( + f"{self.connector_url}/api/system/inputs", + headers=self.HEADERS, + auth=(self.connector_username, self.connector_password), + verify=False, + ) + configured_inputs_list = [] + for input in configured_inputs.json()["inputs"]: + configured_inputs_list.append( + { + "title": input["title"], + "port": input["attributes"]["port"], + }, + ) + return { + "message": "Successfully collected configured inputs", + "success": True, + "configured_inputs": configured_inputs_list, + } + except Exception as e: + logger.error(f"Failed to collect configured inputs: {e}") + return {"message": "Failed to collect configured inputs", "success": False} diff --git a/backend/app/services/Graylog/messages.py b/backend/app/services/Graylog/messages.py new file mode 100644 index 00000000..2e135a28 --- /dev/null +++ b/backend/app/services/Graylog/messages.py @@ -0,0 +1,70 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, GraylogConnector +from app.services.Graylog.universal import UniversalService + + +class MessagesService: + """ + A service class that encapsulates the logic for polling messages from Graylog. + """ + + def collect_messages(self): + """ + Collects the latest 10 messages from Graylog. + + Returns: + list: A list containing the messages. + """ + ( + connector_url, + connector_username, + connector_password, + ) = UniversalService().collect_graylog_details("Graylog") + if ( + connector_url is None + or connector_username is None + or connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + else: + try: + # Get the Graylog Journal Messages where a parameter of `page` is passed with an integer value of `1` + page_number = 1 + graylog_messages = requests.get( + f"{connector_url}/api/system/messages?page={page_number}", + auth=(connector_username, connector_password), + verify=False, + ) + # If the response is successful, return the messages as a list + if graylog_messages.status_code == 200: + logger.info( + f"Received {len(graylog_messages.json()['messages'])} messages from Graylog" + ) + return { + "message": "Successfully retrieved messages", + "success": True, + "graylog_messages": graylog_messages.json()["messages"], + } + # Otherwise, return an error message + else: + logger.error( + f"Failed to collect messages from Graylog: {graylog_messages.json()}" + ) + return { + "message": "Failed to collect messages from Graylog", + "success": False, + } + except Exception as e: + logger.error(f"Failed to collect messages from Graylog: {e}") + return { + "message": "Failed to collect messages from Graylog", + "success": False, + } diff --git a/backend/app/services/Graylog/metrics.py b/backend/app/services/Graylog/metrics.py new file mode 100644 index 00000000..6cf479f8 --- /dev/null +++ b/backend/app/services/Graylog/metrics.py @@ -0,0 +1,227 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, GraylogConnector +from app.services.Graylog.universal import UniversalService + + +class MetricsService: + """ + A service class that encapsulates the logic for 
pulling metrics from Graylog. + """ + + METRIC_NAMES: Dict[str, str] = { + "org.graylog2.throughput.input.1-sec-rate": "input_1_sec_rate", + "org.graylog2.throughput.output.1-sec-rate": "output_1_sec_rate", + "org.graylog2.buffers.input.usage": "input_usage", + "org.graylog2.buffers.output.usage": "output_usage", + "org.graylog2.buffers.process.usage": "processor_usage", + "org.graylog2.throughput.input": "total_input", + "org.graylog2.throughput.output": "total_output", + } + + HEADERS: Dict[str, str] = {"X-Requested-By": "CoPilot"} + + def collect_uncommitted_journal_size(self): + """ + Collects the journal size of uncommitted messages from Graylog. + + Returns: + list: A list containing the metrics. + """ + ( + connector_url, + connector_username, + connector_password, + ) = UniversalService().collect_graylog_details("Graylog") + if ( + connector_url is None + or connector_username is None + or connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + else: + journal_size = self._collect_metrics_uncommitted_journal_size( + connector_url, connector_username, connector_password + ) + + if journal_size["success"] is False: + return journal_size + return journal_size + + def collect_throughput_metrics(self): + """ + Collects the following Graylog Metrics: + - Input Usage + - Output Usage + - Processor Usage + - Input 1 Seconds Rate + - Output 1 Seconds Rate + - Total Input + - Total Output + + Returns: + list: A list containing the metrics. + """ + ( + connector_url, + connector_username, + connector_password, + ) = UniversalService().collect_graylog_details("Graylog") + if ( + connector_url is None + or connector_username is None + or connector_password is None + ): + return {"message": "Failed to collect Graylog details", "success": False} + else: + throughput_usage = self._collect_metrics_throughput_usage( + connector_url, connector_username, connector_password + ) + + if throughput_usage["success"] is False: + return throughput_usage + return throughput_usage + + def _collect_metrics_uncommitted_journal_size( + self, connector_url: str, connector_username: str, connector_password: str + ): + """ + Collects the journal size of uncommitted messages from Graylog. + + Args: + connector_url (str): The URL of the Graylog connector. + connector_username (str): The username of the Graylog connector. + connector_password (str): The password of the Graylog connector. + + Returns: + int: The journal size. 
+ """ + try: + logger.info("Collecting journal size from Graylog") + headers = {"X-Requested-By": "CoPilot"} + # Get the Graylog Journal Size + uncommitted_journal_size_response = requests.get( + f"{connector_url}/api/system/journal", + headers=headers, + auth=(connector_username, connector_password), + verify=False, + ) + uncommitted_journal_size = uncommitted_journal_size_response.json() + + logger.info( + f"Received {uncommitted_journal_size} uncommitted journal entries from Graylog" + ) + return { + "message": "Successfully retrieved journal size", + "success": True, + "uncommitted_journal_entries": uncommitted_journal_size.get( + "uncommitted_journal_entries", 0 + ), + } + except Exception as e: + logger.error(f"Failed to collect journal size from Graylog: {e}") + return { + "message": "Failed to collect journal size from Graylog", + "success": False, + } + + def _collect_metrics_throughput_usage( + self, connector_url: str, connector_username: str, connector_password: str + ) -> Dict[str, object]: + """ + Collects throughput usage from Graylog. + + Args: + connector_url (str): The URL of the Graylog connector. + connector_username (str): The username of the Graylog connector. + connector_password (str): The password of the Graylog connector. + + Returns: + dict: A dictionary containing the throughput usage. + """ + logger.info("Collecting throughput usage from Graylog") + + try: + throughput_metrics = self._make_throughput_api_call( + connector_url, self.HEADERS, connector_username, connector_password + ) + return self._parse_throughput_metrics(throughput_metrics) + except Exception as e: + logger.error(f"Failed to collect throughput usage from Graylog: {e}") + return { + "message": "Failed to collect throughput usage from Graylog", + "success": False, + } + + def _make_throughput_api_call( + self, + connector_url: str, + headers: Dict[str, str], + connector_username: str, + connector_password: str, + ) -> Dict[str, object]: + """ + Makes Throughput API call to Graylog. + + Args: + connector_url (str): The URL of the Graylog connector. + headers (dict): The headers for the request. + connector_username (str): The username of the Graylog connector. + connector_password (str): The password of the Graylog connector. + + Returns: + dict: The dictionary containing throughput metrics. + """ + throughput = requests.get( + f"{connector_url}/api/system/metrics", + headers=headers, + auth=(connector_username, connector_password), + verify=False, + ) + throughput_json = throughput.json() + throughput_metrics = throughput_json["gauges"] + input_output_throughput = throughput_json["counters"] + + throughput_metrics.update(input_output_throughput) # Merge the two dictionaries + return throughput_metrics + + def _parse_throughput_metrics( + self, throughput_metrics: Dict[str, object] + ) -> Dict[str, object]: + """ + Parses throughput metrics. + + Args: + throughput_metrics (dict): The dictionary containing throughput metrics. + + Returns: + dict: The dictionary with parsed throughput metrics. 
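+
+        Example (illustrative shape only, based on the keys returned below;
+        the value shown is a placeholder):
+
+            {"success": True,
+             "message": "Successfully retrieved throughput usage",
+             "throughput_metrics": [{"metric": "org.graylog2.throughput.input", "value": 0}]}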
+ """ + throughput_metrics_list = [] + results = {} + + for metric, data in throughput_metrics.items(): + if metric in self.METRIC_NAMES: + value = data["value"] if "value" in data else data["count"] + throughput_metrics_list.append({"metric": metric, "value": value}) + + variable_name = self.METRIC_NAMES.get(metric) + if variable_name is not None: + results[variable_name] = value + + logger.info( + f"Received throughput usage from Graylog: {throughput_metrics_list}" + ) + return { + "message": "Successfully retrieved throughput usage", + "success": True, + "throughput_metrics": throughput_metrics_list, + } diff --git a/backend/app/services/Graylog/universal.py b/backend/app/services/Graylog/universal.py new file mode 100644 index 00000000..d1857087 --- /dev/null +++ b/backend/app/services/Graylog/universal.py @@ -0,0 +1,41 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, GraylogConnector + + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from Graylog. + """ + + def __init__(self) -> None: + self.collect_graylog_details("Graylog") + + def collect_graylog_details(self, connector_name: str): + """ + Collects the details of the Graylog connector. + + Args: + connector_name (str): The name of the Graylog connector. + + Returns: + tuple: A tuple containing the connection URL, username, and password. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_username"), + connection_details.get("connector_password"), + ) + else: + return None, None, None diff --git a/backend/app/services/Shuffle/universal.py b/backend/app/services/Shuffle/universal.py new file mode 100644 index 00000000..72461cf2 --- /dev/null +++ b/backend/app/services/Shuffle/universal.py @@ -0,0 +1,46 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from elasticsearch7 import Elasticsearch +from app.models.connectors import connector_factory, Connector + + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from the Wazuh-Indexer. + """ + + def __init__(self) -> None: + self.collect_shuffle_details("Shuffle") + ( + self.connector_url, + self.connector_api_key, + ) = self.collect_shuffle_details("Shuffle") + + def collect_shuffle_details(self, connector_name: str): + """ + Collects the details of the Shuffle connector. + + Args: + connector_name (str): The name of the Shuffle connector. + + Returns: + tuple: A tuple containing the connection URL, and api key. 
+ """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_api_key"), + ) + else: + return None, None diff --git a/backend/app/services/Shuffle/workflows.py b/backend/app/services/Shuffle/workflows.py new file mode 100644 index 00000000..862bf051 --- /dev/null +++ b/backend/app/services/Shuffle/workflows.py @@ -0,0 +1,178 @@ +from typing import Dict +import requests +from loguru import logger +from app.services.Shuffle.universal import UniversalService + + +class WorkflowsService: + """ + A service class that encapsulates the logic for pulling workflows from Shuffle. + """ + + def __init__(self): + self._collect_shuffle_details() + self.session = requests.Session() + self.session.headers.update({"Authorization" : f"Bearer {self.connector_api_key}"}) + + def _collect_shuffle_details(self): + self.connector_url, self.connector_api_key = UniversalService().collect_shuffle_details("Shuffle") + + def _are_details_collected(self) -> bool: + return all([self.connector_url, self.connector_api_key]) + + def _send_request(self, url: str): + return self.session.get( + url, + verify=False, + ) + + def collect_workflows(self) -> Dict[str, object]: + """ + Collects the workflows from Shuffle. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflows. + """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Shuffle details", + "success": False, + } + + workflows = self._collect_workflows() + if not workflows["success"]: + return workflows + + return { + "message": "Successfully collected workflows", + "success": True, + "workflows": workflows["workflows"], + } + + def _handle_request_error(self, err): + logger.error(f"Failed to collect workflows from Shuffle: {err}") + return { + "message": "Failed to collect workflows from Shuffle", + "success": False, + } + + def _collect_workflows(self) -> Dict[str, object]: + """ + Collects the workflows from Shuffle. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflows. + """ + try: + response = self._send_request(f"{self.connector_url}/api/v1/workflows") + response.raise_for_status() + except requests.exceptions.HTTPError as err: + return self._handle_request_error(err) + + return { + "message": "Successfully collected workflows from Shuffle", + "success": True, + "workflows": response.json(), + } + + def collect_workflow_details(self) -> Dict[str, object]: + """ + Collects the workflow ID and workflow name from Shuffle. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflow IDs. + """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Shuffle details", + "success": False, + } + + workflows = self._collect_workflow_details() + if not workflows["success"]: + return workflows + + return { + "message": "Successfully collected workflow details", + "success": True, + "workflows": workflows["workflows"], + } + + def _collect_workflow_details(self) -> Dict[str, object]: + """ + Collects the workflow ID and workflow name from Shuffle. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflow IDs. 
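+
+        Example (illustrative; the workflow ID and name are placeholders):
+
+            {"success": True,
+             "message": "Successfully collected workflow details from Shuffle",
+             "workflows": [{"workflow_id": "abc123", "workflow_name": "Enrich Alert"}]}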
+ """ + try: + response = self._send_request(f"{self.connector_url}/api/v1/workflows") + response.raise_for_status() + except requests.exceptions.HTTPError as err: + return self._handle_request_error(err) + + workflows = response.json() + workflow_details = [] + for workflow in workflows: + workflow_details.append({ + "workflow_id": workflow["id"], + "workflow_name": workflow["name"] + }) + + return { + "message": "Successfully collected workflow details from Shuffle", + "success": True, + "workflows": workflow_details, + } + + def collect_workflow_executions_status(self, workflow_id: str) -> Dict[str, object]: + """ + Collects the execution status of a Shuffle Workflow by its ID. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflow execution status. + """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Shuffle details", + "success": False, + } + + executions = self._collect_workflow_executions_status(workflow_id) + if not executions["success"]: + return executions + + return { + "message": "Successfully collected workflow executions", + "success": True, + "executions": executions["executions"], + } + + def _collect_workflow_executions_status(self, workflow_id: str) -> Dict[str, object]: + """ + Collects the execution status of a Shuffle Workflow by its ID. + + Returns: + dict: A dictionary containing the success status, a message and potentially the workflow execution status. + """ + try: + response = self._send_request(f"{self.connector_url}/api/v1/workflows/{workflow_id}/executions") + response.raise_for_status() + except requests.exceptions.HTTPError as err: + return self._handle_request_error(err) + + executions = response.json() + if executions: + status = executions[0]["status"] + if status is None: + status = "Never Ran" + else: + logger.info(f"No Workflow Executions found from {self.connector_url}") + status = None + + return { + "message": "Successfully collected workflow executions from Shuffle", + "success": True, + "executions": status, + + } diff --git a/backend/app/services/Velociraptor/artifacts.py b/backend/app/services/Velociraptor/artifacts.py new file mode 100644 index 00000000..275dcff0 --- /dev/null +++ b/backend/app/services/Velociraptor/artifacts.py @@ -0,0 +1,121 @@ +from typing import Dict +from loguru import logger +from pyvelociraptor import api_pb2 +from werkzeug.utils import secure_filename +from app.services.Velociraptor.universal import UniversalService +import json + + +class ArtifactsService: + """ + A service class that encapsulates the logic for pulling artifacts from Velociraptor. + """ + + def __init__(self): + self.universal_service = UniversalService() + + def _create_query(self, query: str): + """ + Create a query string. + + Args: + query (str): The query to be executed. + + Returns: + str: The created query string. + """ + return query + + def _get_artifact_key(self, client_id: str, artifact: str): + """ + Construct the artifact key. + + Args: + client_id (str): The ID of the client. + artifact (str): The name of the artifact. + + Returns: + str: The constructed artifact key. + """ + return f"collect_client(client_id='{client_id}', artifacts=['{artifact}'])" + + def collect_artifacts(self): + """ + Collect the artifacts from Velociraptor. + + Returns: + dict: A dictionary with the success status, a message, and potentially the artifacts. 
+ """ + query = self._create_query("SELECT name FROM artifact_definitions()") + return self.universal_service.execute_query(query) + + def collect_artifacts_prefixed(self, prefix: str): + """ + Collect the artifacts from Velociraptor that have a name beginning with a specific prefix. + + Args: + prefix (str): The prefix to filter the artifacts. + + Returns: + dict: A dictionary with the success status, a message, and potentially the artifacts. + """ + artifacts_response = self.collect_artifacts() + if not artifacts_response["success"]: + return artifacts_response + + filtered_artifacts = [ + artifact + for artifact in artifacts_response["results"] + if artifact["name"].startswith(prefix) + ] + + return { + "success": True, + "message": f"Successfully collected {prefix} artifacts", + "artifacts": filtered_artifacts, + } + + def collect_artifacts_linux(self): + return self.collect_artifacts_prefixed("Linux.") + + def collect_artifacts_windows(self): + return self.collect_artifacts_prefixed("Windows.") + + def collect_artifacts_macos(self): + return self.collect_artifacts_prefixed("MacOS.") + + def run_artifact_collection(self, client_id: str, artifact: str): + """ + Run an artifact collection on a specific client. + + Args: + client_id (str): The ID of the client. + artifact (str): The name of the artifact. + + Returns: + dict: A dictionary with the success status, a message, and potentially the results. + """ + try: + query = self._create_query( + f"SELECT collect_client(client_id='{client_id}', artifacts=['{artifact}']) FROM scope()" + ) + flow = self.universal_service.execute_query(query) + logger.info(f"Successfully ran artifact collection on {flow}") + + artifact_key = self._get_artifact_key(client_id, artifact) + flow_id = flow["results"][0][artifact_key]["flow_id"] + logger.info(f"Successfully ran artifact collection on {flow_id}") + + completed = self.universal_service.watch_flow_completion(flow_id) + logger.info(f"Successfully watched flow completion on {completed}") + + results = self.universal_service.read_collection_results( + client_id, flow_id, artifact + ) + return results + except Exception as err: + logger.error(f"Failed to run artifact collection: {err}") + return { + "message": "Failed to run artifact collection", + "success": False, + } diff --git a/backend/app/services/Velociraptor/universal.py b/backend/app/services/Velociraptor/universal.py new file mode 100644 index 00000000..a261fffd --- /dev/null +++ b/backend/app/services/Velociraptor/universal.py @@ -0,0 +1,209 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from elasticsearch7 import Elasticsearch +from app.models.connectors import connector_factory, Connector +import pyvelociraptor +from pyvelociraptor import api_pb2 +from pyvelociraptor import api_pb2_grpc +import grpc +import json + + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from Velociraptor. + """ + + def __init__(self) -> None: + self.setup_velociraptor_connector("Velociraptor") + self.setup_grpc_channel_and_stub() + + def setup_velociraptor_connector(self, connector_name: str): + """ + Collects the details of the Velociraptor connector and sets them up. + + Args: + connector_name (str): The name of the Velociraptor connector. 
+ """ + self.connector_url, self.connector_api_key = self.collect_velociraptor_details(connector_name) + self.config = pyvelociraptor.LoadConfigFile(self.connector_api_key) + + def collect_velociraptor_details(self, connector_name: str): + """ + Collects the details of the Velociraptor connector. + + Args: + connector_name (str): The name of the Velociraptor connector. + + Returns: + tuple: A tuple containing the connection URL, and api key. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_api_key"), + ) + else: + return None, None + + def setup_grpc_channel_and_stub(self): + """ + Sets up the gRPC channel and stub for Velociraptor. + """ + creds = grpc.ssl_channel_credentials( + root_certificates=self.config["ca_certificate"].encode("utf8"), + private_key=self.config["client_private_key"].encode("utf8"), + certificate_chain=self.config["client_cert"].encode("utf8"), + ) + options = (("grpc.ssl_target_name_override", "VelociraptorServer"),) + self.channel = grpc.secure_channel(self.config["api_connection_string"], creds, options) + self.stub = api_pb2_grpc.APIStub(self.channel) + + def create_vql_request(self, vql: str): + """ + Creates a VQLCollectorArgs object with given VQL query. + + Args: + vql (str): The VQL query. + + Returns: + VQLCollectorArgs: The VQLCollectorArgs object with given VQL query. + """ + return api_pb2.VQLCollectorArgs( + max_wait=1, + Query=[ + api_pb2.VQLRequest( + Name="VQLRequest", + VQL=vql, + ), + ], + ) + + def execute_query(self, vql: str): + """ + Executes a VQL query and returns the results. + + Args: + vql (str): The VQL query to be executed. + + Returns: + dict: A dictionary with the success status, a message, and potentially the results. + """ + client_request = self.create_vql_request(vql) + try: + results = [] + for response in self.stub.Query(client_request): + if response.Response: + results += json.loads(response.Response) + return { + "success": True, + "message": "Successfully executed query", + "results": results, + } + except Exception as e: + return { + "success": False, + "message": f"Failed to execute query: {e}", + } + + + def watch_flow_completion(self, flow_id: str): + """ + Watch for the completion of a flow. + + Args: + flow_id (str): The ID of the flow. + + Returns: + dict: A dictionary with the success status and a message. + """ + vql = f"SELECT * FROM watch_monitoring(artifact='System.Flow.Completion') WHERE FlowId='{flow_id}' LIMIT 1" + return self.execute_query(vql) + + def read_collection_results(self, client_id: str, flow_id: str, artifact: str = 'Generic.Client.Info/BasicInformation'): + """ + Read the results of a collection. + + Args: + client_id (str): The client ID. + flow_id (str): The ID of the flow. + artifact (str, optional): The artifact. Defaults to 'Generic.Client.Info/BasicInformation'. + + Returns: + dict: A dictionary with the success status, a message, and potentially the results. + """ + vql = f"SELECT * FROM source(client_id='{client_id}', flow_id='{flow_id}', artifact='{artifact}')" + return self.execute_query(vql) + + def get_client_id(self, client_name: str): + """ + Get the client_id associated with a given client_name. + + Args: + client_name (str): The asset name to search for. 
+ + Returns: + dict: A dictionary with the success status, a message, and potentially the client_id. + """ + # Formulate queries + try: + vql_client_id = f"select client_id from clients(search='host:{client_name}')" + vql_last_seen_at = f"select last_seen_at from clients(search='host:{client_name}')" + + # Get the last seen timestamp + last_seen_at = self._get_last_seen_timestamp(vql_last_seen_at) + + # if last_seen_at is longer than 30 seconds from now, return False + if self._is_offline(last_seen_at): + return { + "success": False, + "message": f"{client_name} has not been seen in the last 30 seconds and may not be online with the Velociraptor server.", + "results": [ + {"client_id": None} + ] + } + + return self.execute_query(vql_client_id) + except Exception as e: + return { + "success": False, + "message": f"Failed to get Client ID for {client_name}: {e}", + "results": [ + {"client_id": None} + ] + } + + def _get_last_seen_timestamp(self, vql: str): + """ + Executes the VQL query and returns the last_seen_at timestamp. + + Args: + vql (str): The VQL query. + + Returns: + float: The last_seen_at timestamp. + """ + return self.execute_query(vql)["results"][0]["last_seen_at"] + + def _is_offline(self, last_seen_at: float): + """ + Determines if the client is offline based on the last_seen_at timestamp. + + Args: + last_seen_at (float): The last_seen_at timestamp. + + Returns: + bool: True if the client is offline, False otherwise. + """ + return (datetime.now() - datetime.fromtimestamp(last_seen_at / 1000000)).total_seconds() > 30 diff --git a/backend/app/services/WazuhIndexer/alerts.py b/backend/app/services/WazuhIndexer/alerts.py new file mode 100644 index 00000000..ec8d48b2 --- /dev/null +++ b/backend/app/services/WazuhIndexer/alerts.py @@ -0,0 +1,120 @@ +from elasticsearch7 import Elasticsearch +from loguru import logger +from typing import Dict, List + +from app.services.WazuhIndexer.universal import UniversalService +from app.services.WazuhIndexer.index import IndexService + + +class AlertsService: + """ + A service class that encapsulates the logic for pulling alerts from the Wazuh-Indexer. + """ + + SKIP_INDEX_NAMES: Dict[str, bool] = { + "wazuh-statistics": True, + "wazuh-monitoring": True, + } + + def __init__(self): + ( + self.connector_url, + self.connector_username, + self.connector_password, + ) = UniversalService().collect_wazuhindexer_details("Wazuh-Indexer") + self.es = Elasticsearch( + [self.connector_url], + http_auth=(self.connector_username, self.connector_password), + verify_certs=False, + timeout=15, + max_retries=10, + retry_on_timeout=False, + ) + + def is_index_skipped(self, index_name: str) -> bool: + """Check if the index should be skipped.""" + for skipped in self.SKIP_INDEX_NAMES: + if index_name.startswith(skipped): + return True + return False + + def collect_alerts(self) -> Dict[str, object]: + """ + Collects the alerts from the Wazuh-Indexer where the index name starts with "wazuh_" and is not in the SKIP_INDEX_NAMES list. + Returns the 10 previous alerts based on the `timestamp_utc` field. + + Returns: + Dict[str, object]: A dictionary containing success status and alerts or an error message. 
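+
+        Example (illustrative shape only; the index name is a placeholder and
+        individual alerts are raw Wazuh-Indexer documents):
+
+            {"success": True,
+             "message": "Successfully collected alerts",
+             "alerts_summary": [{"index_name": "wazuh_alerts-000001",
+                                 "total_alerts": 10,
+                                 "last_10_alerts": [...]}]}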
+ """ + if not all( + [self.connector_url, self.connector_username, self.connector_password] + ): + return { + "message": "Failed to collect Wazuh-Indexer details", + "success": False, + } + + indices_list = UniversalService().collect_indices() + if not indices_list["success"]: + return {"message": "Failed to collect indices", "success": False} + + alerts_summary = [] + for index_name in indices_list["indices_list"]: + if not index_name.startswith("wazuh_") or self.is_index_skipped(index_name): + continue + + alerts = self._collect_alerts(index_name) + if alerts["success"] and len(alerts["alerts"]) > 0: + alerts_summary.append( + { + "index_name": index_name, + "total_alerts": len(alerts["alerts"]), + "last_10_alerts": alerts["alerts"], + } + ) + + return { + "message": "Successfully collected alerts", + "success": True, + "alerts_summary": alerts_summary, + } + + def _collect_alerts(self, index_name: str) -> Dict[str, object]: + """ + Elasticsearch query to get the 10 most recent alerts where the `rule_level` is 12 or higher or the + `syslog_level` field is `ALERT` and return the results in descending order by the `timestamp_utc` field. + + Args: + index_name (str): The name of the index to query. + + Returns: + Dict[str, object]: A dictionary containing success status and alerts or an error message. + """ + logger.info(f"Collecting alerts from {index_name}") + query = self._build_query() + try: + alerts = self.es.search(index=index_name, body=query, size=10) + alerts_list = [alert for alert in alerts["hits"]["hits"]] + return { + "message": "Successfully collected alerts", + "success": True, + "alerts": alerts_list, + } + except Exception as e: + logger.error(f"Failed to collect alerts: {e}") + return {"message": "Failed to collect alerts", "success": False} + + @staticmethod + def _build_query() -> Dict[str, object]: + """Builds and returns the query.""" + return { + "query": { + "bool": { + "should": [ + {"range": {"rule_level": {"gte": 12}}}, + {"match": {"syslog_level": "ALERT"}}, + ] + } + }, + "sort": [{"timestamp_utc": {"order": "desc"}}], + } diff --git a/backend/app/services/WazuhIndexer/cluster.py b/backend/app/services/WazuhIndexer/cluster.py new file mode 100644 index 00000000..fd21a972 --- /dev/null +++ b/backend/app/services/WazuhIndexer/cluster.py @@ -0,0 +1,181 @@ +from typing import Dict +import requests +from elasticsearch7 import Elasticsearch +from loguru import logger +from app.services.WazuhIndexer.universal import UniversalService + + +class ClusterService: + """ + A service class that encapsulates the logic for pulling indices from the Wazuh-Indexer. + """ + + def __init__(self): + self._collect_wazuhindexer_details() + self._initialize_es_client() + + def _collect_wazuhindexer_details(self): + self.connector_url, self.connector_username, self.connector_password = UniversalService().collect_wazuhindexer_details("Wazuh-Indexer") + + def _initialize_es_client(self): + self.es = Elasticsearch( + [self.connector_url], + http_auth=(self.connector_username, self.connector_password), + verify_certs=False, + timeout=15, + max_retries=10, + retry_on_timeout=False, + ) + + def _are_details_collected(self) -> bool: + return all([self.connector_url, self.connector_username, self.connector_password]) + + def collect_node_allocation(self) -> Dict[str, object]: + """ + Collects the node allocation from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the index allocation. 
+ """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Wazuh-Indexer details", + "success": False, + } + + index_summary = self._collect_node_allocation() + if not index_summary["success"]: + return index_summary + + return { + "message": "Successfully collected node allocation", + "success": True, + "node_allocation": index_summary["node_allocation"], + } + + def _collect_node_allocation(self) -> Dict[str, object]: + """ + Collects the node allocation from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the index allocation. + """ + try: + node_allocation = self.es.cat.allocation(format="json") + node_allocation_list = self._format_node_allocation(node_allocation) + return { + "message": "Successfully collected node allocation", + "success": True, + "node_allocation": node_allocation_list, + } + except Exception as e: + logger.error(f"Failed to collect node allocation: {e}") + return {"message": "Failed to collect node allocation", "success": False} + + def _format_node_allocation(self, node_allocation): + return [ + { + "disk_used": node["disk.used"], + "disk_available": node["disk.avail"], + "disk_total": node["disk.total"], + "disk_percent": node["disk.percent"], + "node": node["node"], + } + for node in node_allocation + ] + + def collect_cluster_health(self) -> Dict[str, object]: + """ + Collects the cluster health from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the cluster health. + """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Wazuh-Indexer details", + "success": False, + } + + index_summary = self._collect_cluster_health() + if not index_summary["success"]: + return index_summary + + return { + "message": "Successfully collected cluster health", + "success": True, + "cluster_health": index_summary["cluster_health"], + } + + def _collect_cluster_health(self) -> Dict[str, object]: + """ + Collects the cluster health from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the cluster health. + """ + try: + cluster_health = self.es.cluster.health() + return { + "message": "Successfully collected cluster health", + "success": True, + "cluster_health": cluster_health, + } + except Exception as e: + logger.error(f"Failed to collect cluster health: {e}") + return {"message": "Failed to collect cluster health", "success": False} + + def collect_shards(self) -> Dict[str, object]: + """ + Collects the shards from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the shards. + """ + if not self._are_details_collected(): + return { + "message": "Failed to collect Wazuh-Indexer details", + "success": False, + } + + index_summary = self._collect_shards() + if not index_summary["success"]: + return index_summary + + return { + "message": "Successfully collected shards", + "success": True, + "shards": index_summary["shards"], + } + + def _collect_shards(self) -> Dict[str, object]: + """ + Collects the shards from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the shards. 
+ """ + try: + shards = self.es.cat.shards(format="json") + shards_list = self._format_shards(shards) + return { + "message": "Successfully collected shards", + "success": True, + "shards": shards_list, + } + except Exception as e: + logger.error(f"Failed to collect shards: {e}") + return {"message": "Failed to collect shards", "success": False} + + def _format_shards(self, shards): + return [ + { + "index": shard["index"], + "shard": shard["shard"], + "state": shard["state"], + "size": shard["store"], + "node": shard["node"], + } + for shard in shards + ] + diff --git a/backend/app/services/WazuhIndexer/index.py b/backend/app/services/WazuhIndexer/index.py new file mode 100644 index 00000000..b37de981 --- /dev/null +++ b/backend/app/services/WazuhIndexer/index.py @@ -0,0 +1,79 @@ +from typing import Dict +import requests +from elasticsearch7 import Elasticsearch +from loguru import logger +from app.services.WazuhIndexer.universal import UniversalService + + +class IndexService: + """ + A service class that encapsulates the logic for pulling indices from the Wazuh-Indexer. + """ + + def __init__(self): + self._collect_wazuhindexer_details() + self._initialize_es_client() + + def _collect_wazuhindexer_details(self): + self.connector_url, self.connector_username, self.connector_password = UniversalService().collect_wazuhindexer_details("Wazuh-Indexer") + + def _initialize_es_client(self): + self.es = Elasticsearch( + [self.connector_url], + http_auth=(self.connector_username, self.connector_password), + verify_certs=False, + timeout=15, + max_retries=10, + retry_on_timeout=False, + ) + + def _are_details_collected(self) -> bool: + return all([self.connector_url, self.connector_username, self.connector_password]) + + def collect_indices_summary(self) -> Dict[str, object]: + """ + Collects summary information for each index from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message, and potentially the indices. + """ + if not self._are_details_collected(): + return {"message": "Failed to collect Wazuh-Indexer details", "success": False} + + index_summary = self._collect_indices() + if not index_summary["success"]: + return index_summary + + summary = self._format_indices_summary(index_summary["indices"]) + + return { + "message": "Successfully collected indices summary", + "success": True, + "indices": summary, + } + + def _format_indices_summary(self, indices: Dict[str, object]) -> Dict[str, object]: + return [ + { + "index": index["index"], + "health": index["health"], + "docs_count": index["docs.count"], + "store_size": index["store.size"], + "replica_count": index["rep"], + } + for index in indices + ] + + def _collect_indices(self) -> Dict[str, object]: + """ + Collects the indices from the Wazuh-Indexer. + + Returns: + dict: A dictionary containing the success status, a message and potentially the indices. 
+ """ + try: + indices = self.es.cat.indices(format="json") + return {"message": "Successfully collected indices", "success": True, "indices": indices} + except Exception as e: + logger.error(e) + return {"message": "Failed to collect indices", "success": False} diff --git a/backend/app/services/WazuhIndexer/universal.py b/backend/app/services/WazuhIndexer/universal.py new file mode 100644 index 00000000..b9e59ab6 --- /dev/null +++ b/backend/app/services/WazuhIndexer/universal.py @@ -0,0 +1,99 @@ +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from typing import Dict, List +from app import db +from datetime import datetime +import requests +from loguru import logger +from elasticsearch7 import Elasticsearch +from app.models.connectors import connector_factory, Connector + + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from the Wazuh-Indexer. + """ + + def __init__(self) -> None: + self.collect_wazuhindexer_details("Wazuh-Indexer") + ( + self.connector_url, + self.connector_username, + self.connector_password, + ) = self.collect_wazuhindexer_details("Wazuh-Indexer") + self.es = Elasticsearch( + [self.connector_url], + http_auth=(self.connector_username, self.connector_password), + verify_certs=False, + timeout=15, + max_retries=10, + retry_on_timeout=False, + ) + + def collect_wazuhindexer_details(self, connector_name: str): + """ + Collects the details of the Wazuh-Indexer connector. + + Args: + connector_name (str): The name of the Wazuh-Indexer connector. + + Returns: + tuple: A tuple containing the connection URL, username, and password. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_username"), + connection_details.get("connector_password"), + ) + else: + return None, None, None + + def collect_indices(self): + """ + Collects the indices from the Wazuh-Indexer. + + Returns: + list: A list containing the indices. + """ + if ( + self.connector_url is None + or self.connector_username is None + or self.connector_password is None + ): + return { + "message": "Failed to collect Wazuh-Indexer details", + "success": False, + } + + indices = self._collect_indices() + + if indices["success"]: + return indices + + return {"message": "Failed to collect indices", "success": False} + + def _collect_indices(self) -> Dict[str, object]: + """ + Wazuh-Indexer query to retrievce all indices. 
+ + Returns: + Dict[str, object]: _description_ + """ + try: + indices_dict = self.es.indices.get_alias("*") + indices_list = list(indices_dict.keys()) + return { + "message": "Successfully collected indices", + "success": True, + "indices_list": indices_list, + } + except Exception as e: + logger.error(f"Failed to collect indices: {e}") + return {"message": "Failed to collect indices", "success": False} diff --git a/backend/app/services/WazuhManager/agent.py b/backend/app/services/WazuhManager/agent.py new file mode 100644 index 00000000..d9aeaa25 --- /dev/null +++ b/backend/app/services/WazuhManager/agent.py @@ -0,0 +1,137 @@ +from typing import Dict, Optional, List, Any +from loguru import logger +from app.services.WazuhManager.universal import UniversalService +import requests + +class WazuhHttpRequests: + """ + Class to handle HTTP requests to the Wazuh API. + """ + def __init__(self, connector_url: str, wazuh_auth_token: str) -> None: + """ + Args: + connector_url (str): The URL of the Wazuh Manager. + wazuh_auth_token (str): The Wazuh API authentication token. + """ + self.connector_url = connector_url + self.wazuh_auth_token = wazuh_auth_token + self.headers = {"Authorization": f"Bearer {wazuh_auth_token}"} + + def delete_request(self, endpoint: str, params: Optional[Dict[str, str]] = None) -> Dict[str, bool]: + """ + Function to handle DELETE requests. + + Args: + endpoint (str): The endpoint to make a DELETE request to. + params (Optional[Dict[str, str]]): Any parameters to pass in the DELETE request. + + Returns: + Dict[str, bool]: A dictionary indicating the success of the operation. + """ + try: + response = requests.delete( + f"{self.connector_url}/{endpoint}", + headers=self.headers, + params=params, + verify=False, + ) + response.raise_for_status() + logger.info(f"Successfully deleted {endpoint}") + return {"agentDeleted": True} + + except Exception as e: + logger.error(f"Failed to delete {endpoint}: {e}") + return {"agentDeleted": False} + +class WazuhManagerAgentService: + """ + A service class that encapsulates the logic for handling agent related operations in Wazuh Manager. + """ + def __init__(self, universal_service: UniversalService) -> None: + """ + Args: + universal_service (UniversalService): The UniversalService instance to use. + """ + self.universal_service = universal_service + self.auth_token = universal_service.get_auth_token() + self.wazuh_http_requests = WazuhHttpRequests(self.universal_service.connector_url, self.auth_token) + + def collect_agents(self) -> Optional[List[Dict[str, str]]]: + """ + Collect all agents from Wazuh Manager. + + Returns: + Optional[List[Dict[str, str]]]: A list of dictionaries containing agent data, or None on failure. + """ + logger.info("Collecting Wazuh Agents") + try: + agent_data = self._get_agent_data() + if agent_data is None: + return None + + wazuh_agents_list = self._build_agent_list(agent_data) + return wazuh_agents_list + except Exception as e: + logger.error(f"Failed to collect Wazuh Agents: {e}") + return None + + def _get_agent_data(self) -> Optional[Dict[str, Any]]: + """ + Get agent data from Wazuh Manager. + + Returns: + Optional[Dict[str, Any]]: A dictionary containing agent data, or None on failure. 
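+
+        Example of a single affected item as consumed by _build_agent_list (values are illustrative):
+            {"id": "001", "name": "WIN-SRV01", "ip": "10.0.0.5", "os": {"name": "Microsoft Windows Server 2019"}, "lastKeepAlive": "2023-06-01T12:00:00+00:00"}
+        Note: the Wazuh API returns these items as a list, so callers iterate over the result.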
+ """ + headers = {"Authorization": f"Bearer {self.auth_token}"} + limit = 1000 + response = requests.get( + f"{self.universal_service.connector_url}/agents?limit={limit}", headers=headers, verify=False + ) + if response.status_code == 200: + return response.json()["data"]["affected_items"] + else: + return None + + def _build_agent_list(self, agent_data: Dict[str, Any]) -> List[Dict[str, str]]: + """ + Build a list of agent data dictionaries. + + Args: + agent_data (Dict[str, Any]): The raw agent data. + + Returns: + List[Dict[str, str]]: A list of dictionaries containing agent data. + """ + wazuh_agents_list = [] + for agent in agent_data: + os_name = agent.get("os", {}).get("name", "Unknown") + last_keep_alive = agent.get("lastKeepAlive", "Unknown") + wazuh_agents_list.append( + { + "agent_id": agent["id"], + "agent_name": agent["name"], + "agent_ip": agent["ip"], + "agent_os": os_name, + "agent_last_seen": last_keep_alive, + }, + ) + logger.info(f"Collected Wazuh Agent: {agent['name']}") + return wazuh_agents_list + + def delete_agent(self, agent_id: str) -> Dict[str, bool]: + """ + Delete an agent from Wazuh Manager. + + Args: + agent_id (str): The id of the agent to be deleted. + + Returns: + Dict[str, bool]: A dictionary indicating the success of the operation. + """ + params = { + "purge": True, + "agents_list": [agent_id], + "status": "all", + "older_than": "0s", + } + return self.wazuh_http_requests.delete_request("agents", params) diff --git a/backend/app/services/WazuhManager/disabled_rule.py b/backend/app/services/WazuhManager/disabled_rule.py new file mode 100644 index 00000000..619a2c62 --- /dev/null +++ b/backend/app/services/WazuhManager/disabled_rule.py @@ -0,0 +1,179 @@ +from typing import Dict, Optional, Union, List, Any, Tuple +from loguru import logger +from app.services.WazuhManager.universal import UniversalService +import requests +from app.models.rules import DisabledRules +from app.models.connectors import connector_factory, Connector +from app import db +import xmltodict +import xml.etree.ElementTree as ET +import json + +class WazuhHttpRequests: + """ + Class to handle HTTP requests to the Wazuh API. + """ + def __init__(self, connector_url: str, wazuh_auth_token: str) -> None: + """ + Args: + connector_url (str): The URL of the Wazuh Manager. + wazuh_auth_token (str): The Wazuh API authentication token. + """ + self.connector_url = connector_url + self.wazuh_auth_token = wazuh_auth_token + self.headers = {"Authorization": f"Bearer {wazuh_auth_token}"} + + def get_request(self, endpoint: str, params: Optional[Dict[str, str]] = None) -> Dict[str, Union[str, bool]]: + """ + Function to handle GET requests. + + Args: + endpoint (str): The endpoint to make a GET request to. + params (Optional[Dict[str, str]]): Any parameters to pass in the GET request. + + Returns: + Dict[str, Union[str, bool]]: A dictionary with the requested data or error message. + """ + try: + response = requests.get( + f"{self.connector_url}/{endpoint}", + headers=self.headers, + params=params, + verify=False, + ) + response.raise_for_status() + return {"data": response.json(), "success": True} + + except Exception as e: + logger.error(f"GET request to {endpoint} failed: {e}") + return {"message": f"GET request to {endpoint} failed: {e}", "success": False} + + def put_request(self, endpoint: str, data: str, params: Optional[Dict[str, str]] = None) -> Dict[str, bool]: + """ + Function to handle PUT requests. + + Args: + endpoint (str): The endpoint to make a PUT request to. 
+ data (str): Data to be updated on the PUT request. + params (Optional[Dict[str, str]]): Any parameters to pass in the PUT request. + + Returns: + Dict[str, bool]: A dictionary indicating the success of the operation. + """ + try: + headers = self.headers.copy() + headers.update({"Content-Type": "application/octet-stream"}) + + response = requests.put( + f"{self.connector_url}/{endpoint}", + headers=headers, + params=params, + data=data, + verify=False, + ) + response.raise_for_status() + return {"message": f"Successfully updated {endpoint}", "success": True} + + except Exception as e: + logger.error(f"Failed to update {endpoint}: {e}") + return {"message": f"Failed to update {endpoint}: {e}", "success": False} + + +class DisableRuleService: + """ + A service class that encapsulates the logic for handling rule disabling related operations in Wazuh Manager. + """ + def __init__(self, universal_service: UniversalService) -> None: + """ + Args: + universal_service (UniversalService): The UniversalService instance to use. + """ + self.universal_service = universal_service + self.auth_token = universal_service.get_auth_token() + self.wazuh_http_requests = WazuhHttpRequests(self.universal_service.connector_url, self.auth_token) + + def disable_rule(self, request: Dict[str, Union[str, int]]) -> Dict[str, Union[str, bool]]: + try: + self._validate_request(request) + rule_id = request["rule_id"] + filename = self._fetch_filename(rule_id) + file_content = self._fetch_file_content(filename) + previous_level, updated_file_content = self._set_level_1(file_content, rule_id) + xml_content = self._convert_to_xml(updated_file_content) + self._store_disabled_rule_info(rule_id, previous_level, request["reason"], request["length_of_time"]) + self._upload_updated_rule(filename, xml_content) + UniversalService().restart_service() + return {"message": f"Rule {rule_id} successfully disabled in file {filename}.", "success": True} + + except Exception as e: + logger.error(str(e)) + return {"message": str(e), "success": False} + + def _validate_request(self, request: Dict[str, Union[str, int]]): + logger.info(f"Validating disable rule request: {request}") + if "rule_id" not in request: + raise ValueError("Request missing rule_id") + if "reason" not in request: + raise ValueError("Request missing reason") + if "length_of_time" not in request: + raise ValueError("Request missing length_of_time") + request["length_of_time"] = int(request["length_of_time"]) + + def _fetch_filename(self, rule_id: str) -> str: + filename_data = self.wazuh_http_requests.get_request("rules", {"rule_ids": rule_id}) + if not filename_data["success"]: + raise ValueError(filename_data["message"]) + return filename_data["data"]["data"]["affected_items"][0]["filename"] + + def _fetch_file_content(self, filename: str) -> Union[Dict[str, str], List[Dict[str, str]]]: + file_content_data = self.wazuh_http_requests.get_request(f"rules/files/{filename}") + if not file_content_data["success"]: + raise ValueError(file_content_data["message"]) + return file_content_data["data"]["data"]["affected_items"][0]["group"] + + def _set_level_1(self, file_content: Union[Dict[str, str], List[Dict[str, str]]], rule_id: str) -> Tuple[str, Union[Dict[str, str], List[Dict[str, str]]]]: + logger.info(f"Setting rule {rule_id} level to 1 for file_content: {file_content}") + previous_level = None + if isinstance(file_content, dict): + file_content = [file_content] + for group_block in file_content: + rule_block = group_block.get("rule", None) + if not rule_block: + 
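# Some group blocks contain no rules; skip them before any level changes are applied.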
continue + if isinstance(rule_block, dict): + rule_block = [rule_block] + for rule in rule_block: + if rule["@id"] == rule_id: + previous_level = rule["@level"] + rule["@level"] = "1" + break + return previous_level, file_content + + def _convert_to_xml(self, updated_file_content: Union[Dict[str, str], List[Dict[str, str]]]) -> str: + logger.info(f"Received updated_file_content: {updated_file_content}") + xml_content_list = [] + for group in updated_file_content: + xml_dict = {"group": group} + xml_content = xmltodict.unparse(xml_dict, pretty=True) + xml_content = xml_content.replace('', "") + xml_content_list.append(xml_content) + xml_content = "\n".join(xml_content_list) + xml_content = xml_content.strip() + return xml_content + + def _store_disabled_rule_info(self, rule_id: str, previous_level: str, reason: str, length_of_time: str): + disabled_rule = DisabledRules( + rule_id=rule_id, + previous_level=previous_level, + new_level="1", + reason_for_disabling=reason, + length_of_time=length_of_time, + ) + db.session.add(disabled_rule) + db.session.commit() + + def _upload_updated_rule(self, filename: str, xml_content: str): + response = self.wazuh_http_requests.put_request(f"rules/files/{filename}", xml_content, {"overwrite": "true"}) + if not response["success"]: + raise ValueError(response["message"]) + diff --git a/backend/app/services/WazuhManager/enabled_rule.py b/backend/app/services/WazuhManager/enabled_rule.py new file mode 100644 index 00000000..5231b367 --- /dev/null +++ b/backend/app/services/WazuhManager/enabled_rule.py @@ -0,0 +1,290 @@ +from typing import Dict, Optional, Union, List, Any, Tuple +from loguru import logger +from app.services.WazuhManager.universal import UniversalService +import requests +from app.models.rules import DisabledRules +from app.models.connectors import connector_factory, Connector +from app import db +import xmltodict +import xml.etree.ElementTree as ET +import json + + +class WazuhHttpRequests: + """ + Class to handle HTTP requests to the Wazuh API. + """ + def __init__(self, connector_url: str, wazuh_auth_token: str) -> None: + """ + Args: + connector_url (str): The URL of the Wazuh Manager. + wazuh_auth_token (str): The Wazuh API authentication token. + """ + self.connector_url = connector_url + self.wazuh_auth_token = wazuh_auth_token + self.headers = {"Authorization": f"Bearer {wazuh_auth_token}"} + + def get_request(self, endpoint: str, params: Optional[Dict[str, str]] = None) -> Dict[str, Union[str, bool]]: + """ + Function to handle GET requests. + + Args: + endpoint (str): The endpoint to make a GET request to. + params (Optional[Dict[str, str]]): Any parameters to pass in the GET request. + + Returns: + Dict[str, Union[str, bool]]: A dictionary with the requested data or error message. + """ + try: + logger.info(f"GET request to {endpoint}") + response = requests.get( + f"{self.connector_url}/{endpoint}", + headers=self.headers, + params=params, + verify=False, + ) + response.raise_for_status() + logger.info(f"Respones: {response.json()}") + return {"data": response.json(), "success": True} + + except Exception as e: + logger.error(f"GET request to {endpoint} failed: {e}") + return {"message": f"GET request to {endpoint} failed: {e}", "success": False} + + def put_request(self, endpoint: str, data: str, params: Optional[Dict[str, str]] = None) -> Dict[str, bool]: + """ + Function to handle PUT requests. + + Args: + endpoint (str): The endpoint to make a PUT request to. + data (str): Data to be updated on the PUT request. 
+ params (Optional[Dict[str, str]]): Any parameters to pass in the PUT request. + + Returns: + Dict[str, bool]: A dictionary indicating the success of the operation. + """ + try: + headers = self.headers.copy() + headers.update({"Content-Type": "application/octet-stream"}) + + response = requests.put( + f"{self.connector_url}/{endpoint}", + headers=headers, + params=params, + data=data, + verify=False, + ) + response.raise_for_status() + return {"message": f"Successfully updated {endpoint}", "success": True} + + except Exception as e: + logger.error(f"Failed to update {endpoint}: {e}") + return {"message": f"Failed to update {endpoint}: {e}", "success": False} + + +class EnableRuleService: + """ + A service class that encapsulates the logic for handling rule enabling related operations in Wazuh Manager. + """ + def __init__(self, universal_service: UniversalService) -> None: + """ + Args: + universal_service (UniversalService): The UniversalService instance to use. + """ + self.universal_service = universal_service + self.auth_token = universal_service.get_auth_token() + self.wazuh_http_requests = WazuhHttpRequests(self.universal_service.connector_url, self.auth_token) + + def enable_rule(self, request: Dict[str, str]) -> Dict[str, Union[str, bool]]: + """ + Enable a rule in the Wazuh Manager. + + Args: + request (Dict[str, str]): The request to enable a rule in Wazuh Manager. + + Returns: + Dict[str, Union[str, bool]]: A dictionary containing status of the operation. + """ + try: + self._validate_request(request) + rule_id = request["rule_id"] + filename = self._fetch_filename(rule_id) + logger.info(f"Getting file content of {filename}") + file_content = self._fetch_file_content(filename) + previous_level = self._get_previous_level(rule_id) + updated_file_content = self._set_level_previous(file_content, rule_id, previous_level) + xml_content = self._json_to_xml(updated_file_content) + self._delete_rule_from_db(rule_id) + self._put_updated_rule(filename, xml_content) + UniversalService().restart_service() + return { + "message": f"Rule {rule_id} successfully enabled in file {filename}.", + "success": True, + } + except Exception as e: + return {"message": str(e), "success": False} + + def _validate_request(self, request: Dict[str, str]) -> str: + """ + Validate the request to enable a rule in Wazuh Manager and return rule_id. + + Args: + request (Dict[str, str]): The request to enable a rule in Wazuh Manager. + + Raises: + ValueError: If the request is missing rule_id. + + Returns: + str: rule_id. + """ + logger.info(f"Validating enable rule request: {request}") + if "rule_id" not in request: + raise ValueError("Request missing rule_id") + return request["rule_id"] + + def _fetch_filename(self, rule_id: str) -> str: + """ + Get the filename of the rule to be enabled. + + Args: + rule_id (str): The id of the rule to be enabled. + + Raises: + RuntimeError: If the filename cannot be obtained. + + Returns: + str: The filename of the rule to be enabled. + """ + filename_data = self.wazuh_http_requests.get_request("rules", {"rule_ids": rule_id}) + if not filename_data["success"]: + raise ValueError(filename_data["message"]) + return filename_data["data"]["data"]["affected_items"][0]["filename"] + + def _fetch_file_content(self, filename: str) -> Any: + """ + Get the content of the rule file. + + Args: + filename (str): The filename of the rule to be enabled. + + Raises: + RuntimeError: If the file content cannot be obtained. + + Returns: + Any: The content of the rule file. 
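+
+        Example of the structure this returns (illustrative; keys taken from how
+        _set_level_previous consumes it): a group block such as
+            {"rule": [{"@id": "100001", "@level": "12"}]}
+        A single group comes back as a dict; multiple groups come back as a list of such dicts.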
+ """ + file_content_data = self.wazuh_http_requests.get_request(f"rules/files/{filename}") + if not file_content_data["success"]: + raise ValueError(file_content_data["message"]) + return file_content_data["data"]["data"]["affected_items"][0]["group"] + + def _get_previous_level(self, rule_id: str) -> str: + """ + Get the previous level of the rule from the `disabled_rules` table. + + Args: + rule_id (str):The rule id to be enabled. + + Raises: + ValueError: If the rule was not previously disabled. + + Returns: + str: The previous level of the rule. + """ + disabled_rule = DisabledRules.query.filter_by(rule_id=rule_id).first() + if not disabled_rule: + raise ValueError(f"Rule {rule_id} is not disabled.") + return disabled_rule.previous_level + + def _set_level_previous(self, file_content: Any, rule_id: str, previous_level: str) -> Any: + """ + Set the level of the rule to be enabled to the previous level. + + Args: + file_content (Any): The content of the rule to be enabled. + rule_id (str): The id of the rule to be enabled. + previous_level (str): The previous level of the rule to be enabled. + + Returns: + Any: The content of the rule with the level set to the previous level. + """ + logger.info( + f"Setting rule {rule_id} level to {previous_level} for file_content: {file_content}" + ) + # If 'file_content' is a dictionary (representing a single group), make it a list of one group + if isinstance(file_content, dict): + file_content = [file_content] + + for group_block in file_content: + rule_block = group_block.get("rule", None) + if not rule_block: + continue + if isinstance(rule_block, dict): + rule_block = [rule_block] + + for rule in rule_block: + if rule["@id"] == rule_id: + # Set the rule level to the previous level. + rule["@level"] = previous_level + break + + return file_content + + def _json_to_xml(self, file_content: Any) -> str: + """ + Convert the rule content from JSON to XML. + + Args: + file_content (Any): The content of the rule to be enabled. + + Raises: + Exception: If the JSON to XML conversion fails. + + Returns: + str: The content of the rule to be enabled in XML format. + """ + logger.info(f"Converting file_content to XML: {file_content}") + + xml_content_list = [] + for group in file_content: + xml_dict = {"group": group} + xml_content = xmltodict.unparse(xml_dict, pretty=True) + # Remove the `` from the + # beginning of the XML string. + xml_content = xml_content.replace( + '', "" + ) + xml_content_list.append(xml_content) + + # Concatenate all XML strings + xml_content = "\n".join(xml_content_list) + # Remove top and bottom line breaks + xml_content = xml_content.strip() + + return xml_content + + def _delete_rule_from_db(self, rule_id: str): + """ + Delete the rule from the `disabled_rules` table. + + Args: + rule_id (str): The rule id to be deleted. + """ + disabled_rule = DisabledRules.query.filter_by(rule_id=rule_id).first() + db.session.delete(disabled_rule) + db.session.commit() + + def _put_updated_rule(self, filename: str, xml_content: str): + """ + PUT the updated rule to the Wazuh Manager. + + Args: + filename (str): The filename of the rule. + xml_content (str): The XML content of the rule. + + Raises: + RuntimeError: If the PUT operation fails. 
+ """ + response = self.wazuh_http_requests.put_request(f"rules/files/{filename}", xml_content, params={"overwrite": "true"}) + if not response["success"]: + raise RuntimeError(f"Could not PUT rule {filename}") diff --git a/backend/app/services/WazuhManager/universal.py b/backend/app/services/WazuhManager/universal.py new file mode 100644 index 00000000..0af61fc1 --- /dev/null +++ b/backend/app/services/WazuhManager/universal.py @@ -0,0 +1,78 @@ +from loguru import logger +import requests +from app.models.connectors import connector_factory, Connector + +class UniversalService: + """ + A service class that encapsulates the logic for polling messages from the Wazuh-Manager API. + """ + + def __init__(self) -> None: + ( + self.connector_url, + self.connector_username, + self.connector_password, + ) = self.collect_wazuhmanager_details("Wazuh-Manager") + + def collect_wazuhmanager_details(self, connector_name: str): + connector_instance = connector_factory.create(connector_name, connector_name) + if connector_instance.verify_connection(): + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_username"), + connection_details.get("connector_password"), + ) + else: + logger.error(f"Connection to {connector_name} failed.") + return None, None, None + + def get_auth_token(self): + """ + Gets the authentication token from the Wazuh-Manager API. + + Returns: + str: The authentication token. + """ + try: + response = requests.get( + f"{self.connector_url}/security/user/authenticate", + auth=(self.connector_username, self.connector_password), + verify=False, + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + logger.error(f"Failed to get auth token: {e}") + return None + auth_token = response.json()["data"]["token"] + logger.info(f"Authentication token: {auth_token}") + return auth_token + + def restart_service(self): + """ + Restart the Wazuh Manager service. + + Returns: + json: A JSON response containing the updated agent information. + """ + headers = {"Authorization": f"Bearer {self.get_auth_token()}"} + try: + response = requests.put( + f"{self.connector_url}/manager/restart", + headers=headers, + verify=False, + ) + if response.status_code == 200: + logger.info(f"Wazuh Manager service restarted") + return {"message": "Wazuh Manager service restarted", "success": True} + else: + logger.error( + f"Wazuh Manager service restart failed with error: {response.text}" + ) + return { + "message": "Wazuh Manager service restart failed", + "success": False, + } + except Exception as e: + logger.error(f"Wazuh Manager service restart failed with error: {e}") + return {"message": "Wazuh Manager service restart failed", "success": False} diff --git a/backend/app/services/WazuhManager/vulnerability.py b/backend/app/services/WazuhManager/vulnerability.py new file mode 100644 index 00000000..3f05a93d --- /dev/null +++ b/backend/app/services/WazuhManager/vulnerability.py @@ -0,0 +1,107 @@ +from typing import Dict, List, Optional, Any +from loguru import logger +from app.services.WazuhManager.universal import UniversalService +import requests + +class WazuhHttpRequests: + """ + Class to handle HTTP requests to the Wazuh API. + """ + def __init__(self, connector_url: str, wazuh_auth_token: str) -> None: + """ + Args: + connector_url (str): The URL of the Wazuh Manager. + wazuh_auth_token (str): The Wazuh API authentication token. 
+ """ + self.connector_url = connector_url + self.wazuh_auth_token = wazuh_auth_token + self.headers = {"Authorization": f"Bearer {wazuh_auth_token}"} + + def get_request(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]: + """ + Function to handle GET requests. + + Args: + endpoint (str): The endpoint to make a GET request to. + params (Optional[Dict[str, Any]]): Any parameters to pass in the GET request. + + Returns: + Optional[Dict[str, Any]]: The response from the GET request as a dictionary. + """ + try: + response = requests.get( + f"{self.connector_url}/{endpoint}", + headers=self.headers, + params=params, + verify=False, + ) + response.raise_for_status() + return response.json() + + except Exception as e: + logger.error(f"GET request to {self.connector_url}/{endpoint} failed with error: {e}") + return None + +class VulnerabilityService: + """ + A service class that encapsulates the logic for pulling API data from Wazuh Manager. + """ + def __init__(self, universal_service: UniversalService) -> None: + """ + Args: + universal_service (UniversalService): The UniversalService instance to use. + """ + self.universal_service = universal_service + self.auth_token = universal_service.get_auth_token() + self.wazuh_http_requests = WazuhHttpRequests(self.universal_service.connector_url, self.auth_token) + + def agent_vulnerabilities(self, agent_id: str) -> List[Dict[str, Any]]: + """ + Get the vulnerabilities of an agent from Wazuh Manager. + + Args: + agent_id (str): The id of the agent to get vulnerabilities for. + + Returns: + List[Dict[str, Any]]: A list of processed vulnerabilities. + """ + response = self.wazuh_http_requests.get_request(f"vulnerability/{agent_id}", params={"wait_for_complete": True}) + + if response is not None: + processed_vulnerabilities = self._process_agent_vulnerabilities(response) + return processed_vulnerabilities + return [] + + def _process_agent_vulnerabilities(self, response: Dict[str, Any]) -> List[Dict[str, Any]]: + """ + Process the vulnerabilities of an agent from Wazuh Manager. + + Args: + response (dict): The response from Wazuh Manager containing agent vulnerabilities. + + Returns: + List[Dict[str, Any]]: A list of processed vulnerabilities. 
+ """ + vulnerabilities = response.get("data", {}).get("affected_items", []) + # Use list comprehension to create the processed_vulnerabilities list + processed_vulnerabilities = [ + { + "severity": vuln.get("severity"), + "updated": vuln.get("updated"), + "version": vuln.get("version"), + "type": vuln.get("type"), + "name": vuln.get("name"), + "external_references": vuln.get("external_references"), + "condition": vuln.get("condition"), + "detection_time": vuln.get("detection_time"), + "cvss3_score": vuln.get("cvss3_score"), + "published": vuln.get("published"), + "architecture": vuln.get("architecture"), + "cve": vuln.get("cve"), + "status": vuln.get("status"), + "title": vuln.get("title"), + "cvss2_score": vuln.get("cvss2_score"), + } + for vuln in vulnerabilities + ] + return processed_vulnerabilities diff --git a/backend/app/services/agents/agents.py b/backend/app/services/agents/agents.py new file mode 100644 index 00000000..33c9d798 --- /dev/null +++ b/backend/app/services/agents/agents.py @@ -0,0 +1,257 @@ +# services.py +from app.models.agents import ( + AgentMetadata, + agent_metadata_schema, + agent_metadatas_schema, +) +from app import db +from datetime import datetime +import requests +from loguru import logger +from app.models.connectors import connector_factory, Connector, WazuhManagerConnector + + +class AgentService: + """ + A service class that encapsulates the logic for managing agents. + """ + + def get_all_agents(self): + """ + Retrieves all agents from the database. + + Returns: + List[dict]: A list of dictionaries where each dictionary represents the serialized data of an agent. + """ + agents = db.session.query(AgentMetadata).all() + return agent_metadatas_schema.dump(agents) + + def get_agent(self, agent_id): + """ + Retrieves a specific agent from the database using its ID. + + Args: + agent_id (str): The ID of the agent to retrieve. + + Returns: + dict: A dictionary representing the serialized data of the agent if found, otherwise a message indicating that the agent was not found. + """ + agent = db.session.query(AgentMetadata).filter_by(agent_id=agent_id).first() + if agent is None: + return {"message": f"Agent with ID {agent_id} not found"} + return agent_metadata_schema.dump(agent) + + def mark_agent_as_critical(self, agent_id): + """ + Marks a specific agent as critical. + + Args: + agent_id (str): The ID of the agent to mark as critical. + + Returns: + dict: A dictionary representing a success message if the operation was successful, otherwise an error message. + """ + agent = db.session.query(AgentMetadata).filter_by(agent_id=agent_id).first() + + if agent is None: + return {"message": f"Agent {agent_id} not found", "success": False} + + agent.mark_as_critical() + agent_details = agent_metadata_schema.dump(agent) + if agent_details["critical_asset"] == False: + return { + "message": f"Agent {agent_id} failed to mark agent as critical", + "success": False, + } + return {"message": f"Agent {agent_id} marked as critical", "success": True} + + def mark_agent_as_non_critical(self, agent_id): + """ + Marks a specific agent as non-critical. + + Args: + agent_id (str): The ID of the agent to mark as non-critical. + + Returns: + dict: A dictionary representing a success message if the operation was successful, otherwise an error message. 
+ """ + agent = db.session.query(AgentMetadata).filter_by(agent_id=agent_id).first() + + if agent is None: + return {"message": f"Agent {agent_id} not found", "success": False} + + agent.mark_as_non_critical() + agent_details = agent_metadata_schema.dump(agent) + if agent_details["critical_asset"] == True: + return { + "message": f"Agent {agent_id} failed to mark agent as non-critical", + "success": False, + } + return {"message": f"Agent {agent_id} marked as non-critical", "success": True} + + def create_agent(self, agent): + """ + Creates a new agent in the database. + + Args: + agent (dict): A dictionary containing the information of an agent. + + Returns: + The agent object if the agent was successfully created, None otherwise. + """ + try: + agent_last_seen = datetime.strptime( + agent["agent_last_seen"], "%Y-%m-%dT%H:%M:%S+00:00" + ) # Convert to datetime + except ValueError: + logger.info( + f"Invalid format for agent_last_seen: {agent['agent_last_seen']}. Fixing..." + ) + agent_last_seen = datetime.strptime( + "1970-01-01T00:00:00+00:00", "%Y-%m-%dT%H:%M:%S+00:00" + ) # Use the epoch time as default + + agent_metadata = AgentMetadata( + agent_id=agent["agent_id"], + hostname=agent["agent_name"], + ip_address=agent["agent_ip"], + os=agent["agent_os"], + last_seen=agent_last_seen, # Use the datetime object + critical_asset=False, + ) + logger.info(f"Agent metadata: {agent_metadata}") + + try: + db.session.add(agent_metadata) + db.session.commit() + return agent_metadata + except Exception as e: + logger.error(f"Failed to create agent: {e}") + return None + + def delete_agent_db(self, agent_id): + """ + Deletes a specific agent from the database using its ID. + + Args: + agent_id (str): The ID of the agent to delete. + + Returns: + dict: A dictionary representing a success message if the operation was successful, otherwise an error message. + """ + agent = db.session.query(AgentMetadata).filter_by(agent_id=agent_id).first() + if agent is None: + return {"message": f"Agent with ID {agent_id} not found", "success": False} + try: + db.session.delete(agent) + db.session.commit() + return {"message": f"Agent with ID {agent_id} deleted", "success": True} + except Exception as e: + logger.error(f"Failed to delete agent: {e}") + return { + "message": f"Failed to delete agent with ID {agent_id}", + "success": False, + } + + +class AgentSyncService: + def __init__(self): + self.agent_service = AgentService() + + def collect_wazuh_details(self, connector_name: str): + """ + Collects the information of all Wazuh API credentials using the WazuhIndexerConnector class details. + + Returns: + tuple: A tuple containing the connection URL, username, and password. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + if connection_successful: + connection_details = Connector.get_connector_info_from_db(connector_name) + return ( + connection_details.get("connector_url"), + connection_details.get("connector_username"), + connection_details.get("connector_password"), + ) + else: + return None, None, None + + def collect_wazuh_agents(self, connection_url: str, wazuh_auth_token: str): + """ + Collects the information of all agents from the Wazuh API. + + Returns: + list: A list containing the information of all Wazuh agents. 
+ """ + logger.info("Collecting Wazuh Agents") + try: + headers = {"Authorization": f"Bearer {wazuh_auth_token}"} + limit = 1000 + agents_collected = requests.get( + f"{connection_url}/agents?limit={limit}", headers=headers, verify=False + ) + if agents_collected.status_code == 200: + wazuh_agents_list = [] + for agent in agents_collected.json()["data"]["affected_items"]: + os_name = agent.get("os", {}).get("name", "Unknown") + last_keep_alive = agent.get("lastKeepAlive", "Unknown") + wazuh_agents_list.append( + { + "agent_id": agent["id"], + "agent_name": agent["name"], + "agent_ip": agent["ip"], + "agent_os": os_name, + "agent_last_seen": last_keep_alive, + }, + ) + logger.info(f"Collected Wazuh Agent: {agent['name']}") + return wazuh_agents_list + else: + return None + except Exception as e: + logger.error(f"Failed to collect Wazuh Agents: {e}") + return None + + def sync_agents(self): + ( + connection_url, + connection_username, + connection_password, + ) = self.collect_wazuh_details("Wazuh-Manager") + if connection_url is None: + return { + "message": "Failed to get Wazuh-Manager API details", + "success": False, + } + + wazuh_manager_connector = WazuhManagerConnector("Wazuh-Manager") + wazuh_auth_token = wazuh_manager_connector.get_auth_token() + if wazuh_auth_token is None: + return { + "message": "Failed to get Wazuh-Manager API Auth Token", + "success": False, + } + + wazuh_agents_list = self.collect_wazuh_agents(connection_url, wazuh_auth_token) + if wazuh_agents_list is None: + return { + "message": "Failed to collect Wazuh-Manager Agents", + "success": False, + } + + logger.info(f"Collected {wazuh_agents_list} Wazuh Agents") + + agents_added_list = [] + for agent in wazuh_agents_list: + agent_info = self.agent_service.get_agent(agent["agent_id"]) + logger.info(f"Agent info: {agent_info}") + if "message" in agent_info: + self.agent_service.create_agent(agent) + agents_added_list.append(agent) + + return { + "message": "Successfully synced agents.", + "success": True, + "agents_added": agents_added_list, + } diff --git a/backend/app/services/connectors/connectors.py b/backend/app/services/connectors/connectors.py new file mode 100644 index 00000000..9d5c11b8 --- /dev/null +++ b/backend/app/services/connectors/connectors.py @@ -0,0 +1,201 @@ +from app.models.connectors import connector_factory, Connector, ConnectorFactory +from app.models.models import Connectors, connectors_schema, ConnectorsAvailable +from sqlalchemy.exc import SQLAlchemyError +from loguru import logger +from flask import current_app + + +class ConnectorService: + def __init__(self, db): + self.db = db + + def update_connector_in_db(self, connector_id: int, updated_data: dict): + logger.info(f"Updating connector {connector_id} with data {updated_data}") + try: + connector = ( + self.db.session.query(Connectors).filter_by(id=connector_id).first() + ) + if connector: + for key, value in updated_data.items(): + if hasattr(connector, key): + setattr(connector, key, value) + + self.db.session.commit() + return { + "success": True, + "message": f"Connector {connector_id} updated successfully", + "connector_name": connector.connector_name, + } + + else: + return { + "success": False, + "message": f"No connector found with id {connector_id}", + } + except SQLAlchemyError as e: + return {"success": False, "message": f"Database error occurred: {e}"} + + def process_connector(self, connector_name: str): + """ + Creates a connector instance, verifies the connection, and returns the connector details. 
+ + Args: + connector_name (str): The name of the connector to be processed. + + Returns: + dict: A dictionary containing the name of the connector and the status of the connection verification. + """ + connector_instance = connector_factory.create(connector_name, connector_name) + connection_successful = connector_instance.verify_connection() + connection_details = Connector.get_connector_info_from_db(connector_name) + logger.info(f"Connection details: {connection_details}") + return {"name": connector_name, **connection_successful, **connection_details} + + def validate_connector_exists(self, connector_id: int): + """ + Validates that a connector exists in the database. Returns a dictionary containing the validation status and a message indicating the status. + + Args: + connector_id (int): The id of the connector to be validated. + + Returns: + dict: A dictionary containing the validation status and a message indicating the status. + """ + try: + connector = ( + current_app.extensions["sqlalchemy"] + .db.session.query(Connectors) + .filter_by(id=connector_id) + .first() + ) + if connector: + return { + "message": "Connector exists", + "connector_name": connector.connector_name, + "success": True, + } + else: + return { + "message": f"No connector found with id {connector_id}", + "success": False, + } + except SQLAlchemyError as e: + return {"message": f"Database error occurred: {e}", "success": False} + + def update_connector(self, connector_id: int, updated_data: dict): + """ + Updates a connector in the database. + + Args: + connector_id (int): The id of the connector to be updated. + updated_data (dict): A dictionary containing the updated data for the connector. + + Returns: + dict: A dictionary containing the success status and a message indicating the status. If the update operation was successful, it returns the connector name. + """ + try: + connector = ( + self.db.session.query(Connectors).filter_by(id=connector_id).first() + ) + if connector is None: + return { + "message": f"No connector found with id {connector_id}", + "success": False, + } + + for key, value in updated_data.items(): + if hasattr(connector, key): + setattr(connector, key, value) + + self.db.session.commit() + + return { + "message": "Connector updated successfully", + "connector_name": connector.connector_name, + "success": True, + } + + except SQLAlchemyError as e: + return {"message": f"Database error occurred: {e}", "success": False} + + def verify_connector_connection(self, connector_id: int): + """ + Verifies the connection of a connector. + + Args: + connector_id (int): The id of the connector to be verified. + + Returns: + dict: A dictionary containing the success status and a message indicating the status. If the verification operation was successful, it returns the connector name. 
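+
+        Example of the shape returned when the connector is reachable (values are illustrative):
+            {
+                "message": "Connector connection verified successfully",
+                "connector_name": "Wazuh-Indexer",
+                "success": True,
+                "connectionSuccessful": True
+            }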
+ """ + try: + connector = ( + self.db.session.query(Connectors).filter_by(id=connector_id).first() + ) + if connector is None: + return { + "message": f"No connector found with id {connector_id}", + "success": False, + } + connector_instance = connector_factory.create( + connector.connector_name, connector.connector_name + ) + connection_successful = connector_instance.verify_connection() + # Connection successful: {'connectionSuccessful': False} + if connection_successful.get("connectionSuccessful", False) is False: + return { + "message": "Connector connection failed", + "connector_name": connector.connector_name, + "success": True, + **connection_successful, + } + return { + "message": "Connector connection verified successfully", + "connector_name": connector.connector_name, + "success": True, + **connection_successful, + } + except SQLAlchemyError as e: + return {"message": f"Database error occurred: {e}", "success": False} + + def validate_request_data(self, request_data: dict): + """ + Validates the request data to ensure `connector_url`, `connector_username` and `connector_password` are present. Returns a dictionary containing the validation status and a message indicating the status. + + Args: + request_data (dict): A dictionary containing the request data. + + Returns: + dict: A dictionary containing the validation status and a message indicating the status. + """ + if ( + request_data.get("connector_url", None) + and request_data.get("connector_username", None) + and request_data.get("connector_password", None) + ): + return {"message": "Request data is valid", "success": True} + else: + return { + "message": "Request data is invalid. Ensure connector_url, connector_username and connector_password are present", + "success": False, + } + + def validate_request_data_api_key(self, request_data: dict): + """ + Validates the request data to ensure `connector_url` and `connector_api_key` are present. Returns a dictionary containing the validation status and a message indicating the status. + + Args: + request_data (dict): A dictionary containing the request data. + + Returns: + dict: A dictionary containing the validation status and a message indicating the status. + """ + if request_data.get("connector_url", None) and request_data.get( + "connector_api_key", None + ): + return {"message": "Request data is valid", "success": True} + else: + return { + "message": "Request data is invalid. 
Ensure connector_url and connector_api_key are present", + "success": False, + } diff --git a/backend/app/static/swagger.json b/backend/app/static/swagger.json new file mode 100644 index 00000000..77ec8796 --- /dev/null +++ b/backend/app/static/swagger.json @@ -0,0 +1,1945 @@ +{ + "openapi": "3.0.3", + "info": { + "title": "SOCFortress CoPilot", + "version": "1.0.0" + }, + "servers": [ + { + "url": "http://localhost:5000" + } + ], + "tags": [ + { + "name": "Connectors", + "description": "Everything about your Connectors", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Agents", + "description": "Everything about your Agents", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Rules", + "description": "Everything about your Wazuh Rules", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Graylog", + "description": "Everything about Graylog", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Wazuh-Indexer", + "description": "Everything about Wazuh-Indexer", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Shuffle", + "description": "Everything about Shuffle", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "Velociraptor", + "description": "Everything about Velociraptor", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "DFIR-IRIS", + "description": "Everything about DFIR-IRIS", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + } + ], + "paths": { + "/connectors": { + "get": { + "tags": [ + "Connectors" + ], + "summary": "List all available connectors", + "description": "Endpoint to list all available connectors. 
It processes each connector to verify the connection and returns the results.", + "responses": { + "200": { + "description": "A JSON response containing the list of all available connectors along with their connection verification status.", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "connectionSuccessful": { + "type": "boolean" + }, + "connector_api_key": { + "type": "string", + "nullable": true + }, + "connector_last_updated": { + "type": "string", + "format": "date-time" + }, + "connector_name": { + "type": "string" + }, + "connector_password": { + "type": "string", + "nullable": true + }, + "connector_type": { + "type": "string" + }, + "connector_url": { + "type": "string" + }, + "connector_username": { + "type": "string", + "nullable": true + }, + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + } + } + } + } + } + } + } + }, + "/connectors/{id}": { + "get": { + "tags": [ + "Connectors" + ], + "summary": "Get the details of a specific connector.", + "operationId": "getConnectorDetails", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "The id of the connector to be fetched.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "description": "A JSON response containing the details of the connector.", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "connectionSuccessful": { + "type": "boolean" + }, + "connector_api_key": { + "type": "string", + "nullable": true + }, + "connector_last_updated": { + "type": "string", + "format": "date-time" + }, + "connector_name": { + "type": "string" + }, + "connector_password": { + "type": "string", + "nullable": true + }, + "connector_type": { + "type": "string" + }, + "connector_url": { + "type": "string" + }, + "connector_username": { + "type": "string", + "nullable": true + }, + "id": { + "type": "integer" + } + } + } + } + } + }, + "404": { + "description": "The connector could not be found." + } + } + }, + "put": { + "tags": [ + "Connectors" + ], + "summary": "Update a connector", + "description": "Endpoint to update a connector. 
If the update operation was successful, it returns the connection verification status for the updated connector.", + "operationId": "update_connector_route", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "The id of the connector to be updated.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "connector_url": { + "type": "string" + }, + "connector_username": { + "type": "string", + "nullable": true + }, + "connector_password": { + "type": "string", + "nullable": true + }, + "connector_api_key": { + "type": "string", + "nullable": true + } + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "A JSON response containing the connection verification status of the updated connector.", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "connected": { + "type": "boolean" + }, + "message": { + "type": "string" + } + } + } + } + } + }, + "400": { + "description": "If the request data is invalid, it returns a 400 status with an error message.", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "success": { + "type": "boolean" + } + } + } + } + } + }, + "404": { + "description": "If the connector is not found, it returns a 404 status with an error message.", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "success": { + "type": "boolean" + } + } + } + } + } + } + } + } + }, + "/agents": { + "get": { + "tags": [ + "Agents" + ], + "summary": "List all available agents", + "description": "Endpoint to list all available agents. 
It processes each agent to verify the connection and returns the results.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Agent" + } + } + } + } + } + } + } + }, + "/agents/{id}": { + "get": { + "tags": [ + "Agents" + ], + "summary": "Get details of a specific agent", + "description": "Endpoint to get the details of an agent.", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "description": "ID of the agent to be fetched", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Agent" + } + } + } + } + } + } + }, + "/agents/{id}/critical": { + "post": { + "tags": [ + "Agents" + ], + "summary": "Marks an agent as critical", + "description": "Marks an agent as a critical asset in the database.", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of the agent to be marked as critical", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "Agent marked as critical", + "schema": { + "$ref": "#/definitions/Agent" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Agent not found" + } + } + } + }, + "/agents/{id}/delete": { + "post": { + "tags": [ + "Agents" + ], + "summary": "Deletes an agent", + "description": "Deletes an agent from the database.", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of the agent to be deleted", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "Agent deleted", + "schema": { + "$ref": "#/definitions/Agent" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Agent not found" + } + } + } + + }, + "/agents/{id}/noncritical": { + "post": { + "tags": [ + "Agents" + ], + "summary": "Unmarks an agent as critical", + "description": "Marks an agent as a non-critical asset in the database.", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of the agent to be unmarked as critical", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "Agent unmarked as critical", + "schema": { + "$ref": "#/definitions/Agent" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Agent not found" + } + } + } + }, + "/agents/sync": { + "post": { + "summary": "Sync all agents", + "description": "Endpoint to sync all agents.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Operation message" + }, + "success": { + "type": "boolean", + "description": "Indicates if the operation was successful" + }, + "agents_added": { + "type": "array", + "items": { + "type": "object", + "description": "Agent details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "syncAgents", + "tags": [ + "Agents" + ] + } + }, + "/agents/{id}/vulnerabilities": { + "get": { + "tags": [ + "Agents" + ], + "summary": "Get vulnerabilities of a specific agent", + "description": "Endpoint to 
get the vulnerabilities of an agent.", + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "description": "ID of the agent to be fetched", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Vulnerability" + } + } + } + } + } + } + } + }, + "/rule/disable": { + "post": { + "summary": "Disable a rule", + "description": "Endpoint to disable a rule.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "rule_id": { + "type": "string", + "description": "The ID of the rule to be disabled." + }, + "reason": { + "type": "string", + "description": "The reason for disabling the rule." + }, + "length_of_time": { + "type": "integer", + "description": "The length of time the rule should be disabled for." + } + } + } + } + } + }, + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Operation message" + }, + "success": { + "type": "boolean", + "description": "Indicates if the operation was successful" + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "disableRule", + "tags": [ + "Rules" + ] + } + }, + "/rule/enable": { + "post": { + "summary": "Enable a rule", + "description": "Endpoint to enable a rule.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "rule_id": { + "type": "string", + "description": "The ID of the rule to be enabled." 
+ } + } + } + } + } + }, + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Operation message" + }, + "success": { + "type": "boolean", + "description": "Indicates if the operation was successful" + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "enableRule", + "tags": [ + "Rules" + ] + } + }, + "/graylog/messages": { + "get": { + "summary": "Get messages from Graylog", + "description": "Endpoint to get messages from Graylog.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "type": "object", + "description": "Message details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getGraylogMessages", + "tags": [ + "Graylog" + ] + } + }, + "/graylog/metrics": { + "get": { + "summary": "Get metrics from Graylog", + "description": "Endpoint to get metrics from Graylog.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "object", + "description": "Metric details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getGraylogMetrics", + "tags": [ + "Graylog" + ] + } + }, + "/graylog/indices": { + "get": { + "summary": "Get indices from Graylog", + "description": "Endpoint to get indices from Graylog.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "indices": { + "type": "array", + "items": { + "type": "object", + "description": "Index details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getGraylogIndices", + "tags": [ + "Graylog" + ] + } + }, + "/graylog/indices/{index_name}/delete": { + "delete": { + "tags": [ + "Graylog" + ], + "summary": "Deletes a Graylog index", + "description": "Endpoint to delete a Graylog index.", + "parameters": [ + { + "name": "index_name", + "in": "path", + "description": "The name of the index to be deleted.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "Successful operation", + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "success": { + "type": "boolean" + } + } + } + }, + "400": { + "description": "Bad request", + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "success": { + "type": "boolean" + } + } + } + }, + "404": { + "description": "Index not found", + "schema": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "success": { + "type": "boolean" + } + } + } + } + } + 
} + }, + "/graylog/inputs": { + "get": { + "summary": "Get inputs from Graylog", + "description": "Endpoint to get inputs from Graylog.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "inputs": { + "type": "array", + "items": { + "type": "object", + "description": "Input details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getGraylogInputs", + "tags": [ + "Graylog" + ] + } + }, + "/alerts": { + "get": { + "summary": "Get alerts", + "description": "Endpoint to get alerts.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "alerts": { + "type": "array", + "items": { + "type": "object", + "description": "Alert details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAlerts", + "tags": [ + "Wazuh-Indexer" + ] + } + }, + "/wazuh_indexer/allocation": { + "get": { + "summary": "Get node allocation of the Wazuh-Indexer nodes", + "description": "Endpoint to get node allocation.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "indices": { + "type": "array", + "items": { + "type": "object", + "description": "Node details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getNodeAllocation", + "tags": [ + "Wazuh-Indexer" + ] + } + }, + "/wazuh_indexer/indices": { + "get": { + "summary": "Get indices from Wazuh-Indexer", + "description": "Endpoint to get indices from Wazuh-Indexer.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "indices": { + "type": "array", + "items": { + "type": "object", + "description": "Index details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getWazuhIndices", + "tags": [ + "Wazuh-Indexer" + ] + } + }, + "/wazuh_indexer/health": { + "get": { + "summary": "Get health of the Wazuh-Indexer nodes", + "description": "Endpoint to get health.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "cluster_name": { + "type": "string" + }, + "status": { + "type": "string" + }, + "number_of_nodes": { + "type": "integer" + }, + "number_of_data_nodes": { + "type": "integer" + }, + "active_primary_shards": { + "type": "integer" + }, + "active_shards": { + "type": "integer" + }, + "relocating_shards": { + "type": "integer" + }, + "initializing_shards": { + "type": "integer" + }, + "unassigned_shards": { + "type": "integer" + }, + "delayed_unassigned_shards": { + "type": "integer" + }, + "number_of_pending_tasks": { + "type": "integer" + }, + "number_of_in_flight_fetch": { + 
"type": "integer" + }, + "task_max_waiting_in_queue_millis": { + "type": "integer" + }, + "active_shards_percent_as_number": { + "type": "integer" + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getHealth", + "tags": [ + "Wazuh-Indexer" + ] + } + }, + "/wazuh_indexer/shards": { + "get": { + "summary": "Get shards from Wazuh-Indexer", + "description": "Endpoint to get shards from Wazuh-Indexer.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "shards": { + "type": "array", + "items": { + "type": "object", + "description": "Shard details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getShards", + "tags": [ + "Wazuh-Indexer" + ] + } + }, + "/shuffle/workflows": { + "get": { + "summary": "Get all workflows", + "description": "Endpoint to get all workflows.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "workflows": { + "type": "array", + "items": { + "type": "object", + "description": "Workflow details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllWorkflows", + "tags": [ + "Shuffle" + ] + } + }, + "/shuffle/workflows/executions": { + "get": { + "summary": "Get all workflow executions", + "description": "Endpoint to get all workflow executions.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "executions": { + "type": "array", + "items": { + "type": "object", + "description": "Workflow execution details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllWorkflowExecutions", + "tags": [ + "Shuffle" + ] + } + }, + "/shuffle/workflows/executions/{workflow_id}": { + "get": { + "summary": "Get workflow executions by workflow id", + "description": "Endpoint to get workflow executions by workflow id.", + "parameters": [ + { + "name": "workflow_id", + "in": "path", + "description": "The workflow id", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "executions": { + "type": "array", + "items": { + "type": "object", + "description": "Workflow execution details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getWorkflowExecutionsByWorkflowId", + "tags": [ + "Shuffle" + ] + } + }, + "/velociraptor/artifacts": { + "get": { + "summary": "Get all artifacts", + "description": "Endpoint to get all artifacts.", + "responses": { + "200": { + "description": "Successful operation", 
+ "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "description": "Artifact details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllArtifacts", + "tags": [ + "Velociraptor" + ] + } + }, + "/velociraptor/artifacts/linux": { + "get": { + "summary": "Get all linux artifacts", + "description": "Endpoint to get all linux artifacts.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "description": "Artifact details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllLinuxArtifacts", + "tags": [ + "Velociraptor" + ] + } + }, + "/velociraptor/artifacts/windows": { + "get": { + "summary": "Get all windows artifacts", + "description": "Endpoint to get all windows artifacts.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "description": "Artifact details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllWindowsArtifacts", + "tags": [ + "Velociraptor" + ] + } + }, + "/velociraptor/artifacts/mac": { + "get": { + "summary": "Get all mac artifacts", + "description": "Endpoint to get all mac artifacts.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "description": "Artifact details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllMacArtifacts", + "tags": [ + "Velociraptor" + ] + } + }, + "/velociraptor/artifacts/collection": { + "post": { + "summary": "Create a new artifact collection", + "description": "Endpoint to create a new artifact collection.", + "requestBody": { + "description": "Artifact collection details", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "artifact_name": { + "type": "string", + "description": "The name of the artifact collection." + }, + "client_name": { + "type": "string", + "description": "The name of the client to collect the artifact for." 
+ } + } + } + } + } + }, + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "collection": { + "type": "object", + "description": "Artifact collection details" + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "createArtifactCollection", + "tags": [ + "Velociraptor" + ] + } + }, + "/dfir_iris/cases": { + "get": { + "summary": "Get all cases", + "description": "Endpoint to get all cases.", + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "cases": { + "type": "array", + "items": { + "type": "object", + "description": "Case details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllCases", + "tags": [ + "DFIR Iris" + ] + } + }, + "/dfir_iris/cases/{case_id}": { + "get": { + "summary": "Get a case", + "description": "Endpoint to get a case.", + "parameters": [ + { + "name": "case_id", + "in": "path", + "description": "The ID of the case.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "case": { + "type": "object", + "description": "Case details" + } + } + } + } + } + }, + "404": { + "description": "Case not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getCase", + "tags": [ + "DFIR Iris" + ] + } + }, + "/dfir_iris/cases/{case_id}/notes": { + "get": { + "summary": "Get all notes for a case", + "description": "Endpoint to get all notes for a case.", + "parameters": [ + { + "name": "case_id", + "in": "path", + "description": "The ID of the case.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "notes": { + "type": "array", + "items": { + "type": "object", + "description": "Note details" + } + } + } + } + } + } + }, + "404": { + "description": "Case not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllNotesForCase", + "tags": [ + "DFIR Iris" + ] + } + }, + "/dfir_iris/cases/{case_id}/note": { + "post": { + "summary": "Create a note for a case", + "description": "Endpoint to create a note for a case.", + "parameters": [ + { + "name": "case_id", + "in": "path", + "description": "The ID of the case.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "requestBody": { + "description": "Note details", + "content": { + "application/json": { + "schema": { + "type": "object", + 
"properties": { + "note_title": { + "type": "string", + "description": "The title of the note" + }, + "note_content": { + "type": "string", + "description": "The content of the note" + } + }, + "description": "Note details" + } + } + } + }, + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "note": { + "type": "object", + "properties": { + "title": { + "type": "string", + "description": "The title of the note" + }, + "content": { + "type": "string", + "description": "The content of the note" + } + }, + "description": "Note details" + } + } + } + } + } + }, + "404": { + "description": "Case not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "createNoteForCase", + "tags": [ + "DFIR Iris" + ] + } + }, + "/dfir_iris/cases/{case_id}/assets": { + "get": { + "summary": "Get all assets for a case", + "description": "Endpoint to get all assets for a case.", + "parameters": [ + { + "name": "case_id", + "in": "path", + "description": "The ID of the case.", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "assets": { + "type": "array", + "items": { + "type": "object", + "description": "Asset details" + } + } + } + } + } + } + }, + "404": { + "description": "Case not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllAssetsForCase", + "tags": [ + "DFIR Iris" + ] + } + }, + "/dfir_iris/alerts": { + "get": { + "summary": "Get all alerts", + "description": "Endpoint to get all alerts.", + "parameters": [ + { + "name": "limit", + "in": "query", + "description": "The maximum number of alerts to return.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "offset", + "in": "query", + "description": "The offset to start returning alerts from.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "sort", + "in": "query", + "description": "The field to sort alerts by.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "order", + "in": "query", + "description": "The order to sort alerts by.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "filter", + "in": "query", + "description": "The filter to apply to the alerts.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Successful operation", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "alerts": { + "type": "array", + "items": { + "type": "object", + "description": "Alert details" + } + } + } + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + }, + "operationId": "getAllAlerts", + "tags": [ + "DFIR Iris" + ] + } + } + }, + "components": { 
+ "schemas": { + "Agent": { + "type": "object", + "properties": { + "agent_id": { + "type": "string", + "description": "The ID of the agent." + }, + "critical_asset": { + "type": "boolean", + "description": "Flag indicating if the agent is a critical asset." + }, + "hostname": { + "type": "string", + "description": "The hostname of the agent." + }, + "id": { + "type": "integer", + "description": "The primary key of the agent." + }, + "ip_address": { + "type": "string", + "description": "The IP address of the agent." + }, + "last_seen": { + "type": "string", + "format": "date-time", + "description": "The last time the agent was seen." + }, + "os": { + "type": "string", + "description": "The operating system of the agent." + } + }, + "required": [ + "agent_id", + "hostname", + "id", + "ip_address", + "last_seen", + "os" + ] + } + } + } +} diff --git a/backend/app/utils.py b/backend/app/utils.py new file mode 100644 index 00000000..3498ac54 --- /dev/null +++ b/backend/app/utils.py @@ -0,0 +1,3 @@ +def allowed_file(filename): + ALLOWED_EXTENSIONS = {"yaml", "txt"} + return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS diff --git a/backend/copilot.py b/backend/copilot.py new file mode 100644 index 00000000..79392e22 --- /dev/null +++ b/backend/copilot.py @@ -0,0 +1,21 @@ +from flask import Flask +from app import db +from app import app + +from loguru import logger + + +logger.add( + "debug.log", + format="{time} {level} {message}", + level="INFO", + rotation="10 MB", + compression="zip", +) +logger.debug("Starting CoPilot...") + +with app.app_context(): + db.create_all() + +if __name__ == "__main__": + app.run(debug=True) diff --git a/backend/migrations/env.py b/backend/migrations/env.py new file mode 100644 index 00000000..359a6ca4 --- /dev/null +++ b/backend/migrations/env.py @@ -0,0 +1,106 @@ +import logging +from logging.config import fileConfig + +from alembic import context +from flask import current_app + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger("alembic.env") + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions["migrate"].db.get_engine() + except TypeError: + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions["migrate"].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace("%", "%%") + except AttributeError: + return str(get_engine().url).replace("%", "%%") + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option("sqlalchemy.url", get_engine_url()) +target_db = current_app.extensions["migrate"].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, "metadatas"): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. 
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(url=url, target_metadata=get_metadata(), literal_binds=True)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, "autogenerate", False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info("No changes in schema detected.")
+
+    connectable = get_engine()
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=get_metadata(),
+            process_revision_directives=process_revision_directives,
+            **current_app.extensions["migrate"].configure_args,
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/backend/requirements.in b/backend/requirements.in
new file mode 100644
index 00000000..675b8930
--- /dev/null
+++ b/backend/requirements.in
@@ -0,0 +1,18 @@
+elasticsearch7==7.10.1
+environs
+flask
+flask-cors
+flask-marshmallow
+flask-migrate
+flask-sqlalchemy
+loguru
+marshmallow-sqlalchemy
+mitreattack-python
+openai
+pika
+psycopg2-binary
+pyvelociraptor~=0.1
+requests
+xmltodict
+blueprint
+flask-swagger-ui
diff --git a/backend/settings.py b/backend/settings.py
new file mode 100644
index 00000000..c34bf1f8
--- /dev/null
+++ b/backend/settings.py
@@ -0,0 +1,25 @@
+"""Application configuration.
+
+Most configuration is set via environment variables.
+
+For local development, use a .env file to set
+environment variables.
+"""
+from pathlib import Path
+
+from environs import Env
+
+env = Env()
+env.read_env()
+
+basedir = Path().absolute()
+db_path = str(basedir / "copilot.db")
+
+ENV = env.str("FLASK_ENV", default="production")
+DEBUG = env.bool("FLASK_DEBUG", default=False)
+SECRET_KEY = env.str("SECRET_KEY", "not-a-secret")
+SQLALCHEMY_DATABASE_URI = env.str("SQLALCHEMY_DATABASE_URI", f"sqlite:///{db_path}")
+SQLALCHEMY_TRACK_MODIFICATIONS = env.bool(
+    "SQLALCHEMY_TRACK_MODIFICATIONS", default=False
+)
+UPLOAD_FOLDER = env.str("UPLOAD_FOLDER", str(Path.home() / "Desktop/copilot_uploads"))
diff --git a/cypress.config.ts b/cypress.config.ts
new file mode 100644
index 00000000..f2dc7cb9
--- /dev/null
+++ b/cypress.config.ts
@@ -0,0 +1,8 @@
+import { defineConfig } from "cypress"
+
+export default defineConfig({
+    e2e: {
+        specPattern: "cypress/e2e/**/*.{cy,spec}.{js,jsx,ts,tsx}",
+        baseUrl: "http://localhost:4173"
+    }
+})
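The OpenAPI document above describes the connector and agent routes exposed by the backend. A minimal client sketch against those routes is shown below; it assumes the Flask backend is served at http://localhost:5000 (the spec in this change set does not declare a server URL) and that the `requests` package is available. The IDs and credential values are placeholders, not values taken from this repository.

```python
# Minimal client sketch for the routes documented above.
# Assumptions: backend reachable at http://localhost:5000; connector ID 1
# and agent ID "001" are placeholders; credential values are illustrative.
import requests

BASE_URL = "http://localhost:5000"

# GET /agents -- list all agents with their connection verification status
agents = requests.get(f"{BASE_URL}/agents").json()

# PUT /connectors/{id} -- update a connector's credentials;
# the body fields mirror the requestBody schema in the spec
update = requests.put(
    f"{BASE_URL}/connectors/1",
    json={
        "connector_url": "https://wazuh-manager.example.local:55000",
        "connector_username": "wazuh",
        "connector_password": "changeme",
        "connector_api_key": None,
    },
)
print(update.json())  # expected keys per the spec: name, connected, message

# POST /agents/{id}/critical -- mark an agent as a critical asset
requests.post(f"{BASE_URL}/agents/001/critical")
```

Note that connector IDs are integers while agent IDs are strings in the spec's path parameters, so the agent ID is passed as a plain path segment here.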