diff --git a/go.mod b/go.mod
index a16ea28..346128f 100644
--- a/go.mod
+++ b/go.mod
@@ -8,6 +8,7 @@ require (
 )
 
 require (
+	github.com/cilium/ebpf v0.19.0 // indirect
 	github.com/google/uuid v1.6.0 // indirect
 	github.com/jinzhu/inflection v1.0.0 // indirect
 	github.com/jinzhu/now v1.1.5 // indirect
diff --git a/go.sum b/go.sum
index c5df534..9d3736d 100644
--- a/go.sum
+++ b/go.sum
@@ -1,3 +1,5 @@
+github.com/cilium/ebpf v0.19.0 h1:Ro/rE64RmFBeA9FGjcTc+KmCeY6jXmryu6FfnzPRIao=
+github.com/cilium/ebpf v0.19.0/go.mod h1:fLCgMo3l8tZmAdM3B2XqdFzXBpwkcSTroaVqN08OWVY=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
diff --git a/handlers/consumer/consumer.go b/handlers/consumer/consumer.go
new file mode 100644
index 0000000..5ddf80b
--- /dev/null
+++ b/handlers/consumer/consumer.go
@@ -0,0 +1,60 @@
+package consumer
+
+import (
+	"net/http"
+	"time"
+	"vm-server/jobs"
+	"vm-server/models"
+	services "vm-server/services/consumer"
+
+	"github.com/google/uuid"
+	"github.com/labstack/echo/v4"
+)
+
+// Handler holds the service and scanner for the consumer handlers.
+type Handler struct {
+	Service *services.Service
+	Scanner *jobs.ConsumerScanner
+}
+
+// NewHandler creates a new consumer handler.
+func NewHandler(s *services.Service, sc *jobs.ConsumerScanner) *Handler {
+	return &Handler{Service: s, Scanner: sc}
+}
+
+// ConsumerHandler handles consumer scan requests.
+func (h *Handler) ConsumerHandler(c echo.Context) error {
+	var req models.ConsumerRequest
+	if err := c.Bind(&req); err != nil {
+		return c.JSON(http.StatusBadRequest, map[string]string{"error": "Invalid request payload"})
+	}
+
+	jobID := uuid.New().String()
+	newJob := &models.ConsumerJob{
+		JobID:     jobID,
+		Status:    "pending",
+		CreatedAt: time.Now(),
+		UpdatedAt: time.Now(),
+	}
+
+	if err := h.Service.CreateConsumerJob(newJob); err != nil {
+		return c.JSON(http.StatusInternalServerError, map[string]string{"error": "Failed to create consumer job"})
+	}
+
+	// Run the scan in the background
+	// TODO - Make this a channel so we can perform operations on failed scans as well.
+	go h.Scanner.PerformScan(newJob, &req)
+
+	return c.JSON(http.StatusAccepted, map[string]string{"jobId": jobID})
+}
+
+// ConsumerByIDHandler handles retrieving a consumer job by its ID.
+func (h *Handler) ConsumerByIDHandler(c echo.Context) error {
+	id := c.Param("id")
+	consumerJob, err := h.Service.GetConsumerJob(id)
+	if err != nil {
+		return c.JSON(http.StatusNotFound, map[string]string{"error": "Consumer job not found"})
+	}
+
+	return c.JSON(http.StatusOK, consumerJob)
+}
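Note: the diff does not show how the new handler is wired into the Echo router. A minimal wiring sketch, assuming the usual constructor chain; the route paths, the `gormstore` alias, and the `main` layout are illustrative only, not part of this change:

```go
package main

import (
	"vm-server/handlers/consumer"
	"vm-server/jobs"
	services "vm-server/services/consumer"
	gormstore "vm-server/store/gorm" // hypothetical alias for the existing store package

	"github.com/labstack/echo/v4"
)

func main() {
	store, err := gormstore.NewStore()
	if err != nil {
		panic(err)
	}

	svc := services.NewService(store)
	scanner := &jobs.ConsumerScanner{Service: svc}
	h := consumer.NewHandler(svc, scanner)

	e := echo.New()
	// Route paths are assumptions; match whatever the existing scan routes use.
	e.POST("/consumers", h.ConsumerHandler)
	e.GET("/consumers/:id", h.ConsumerByIDHandler)
	e.Logger.Fatal(e.Start(":8080"))
}
```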
diff --git a/jobs/consumer.go b/jobs/consumer.go
new file mode 100644
index 0000000..cbf51b5
--- /dev/null
+++ b/jobs/consumer.go
@@ -0,0 +1,18 @@
+package jobs
+
+import (
+	"vm-server/models"
+	consumer "vm-server/services/consumer"
+)
+
+// ConsumerScanner defines the structure for our scanning job runner.
+type ConsumerScanner struct {
+	Service *consumer.Service
+}
+
+func (s *ConsumerScanner) PerformScan(job *models.ConsumerJob, req *models.ConsumerRequest) {
+	// TODO: This is where the trigger to add new config for the ebpf probe
+	// / start the ebpf probe
+	// / get info from the already running ebpf probe
+	// should happen
+}
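`PerformScan` is still a stub. A rough sketch of the job lifecycle it will likely need, using only the service methods added in this diff; the status values, the probe object path (`probe.o`), and the cilium/ebpf usage are assumptions, since the probe itself is not part of this change:

```go
package jobs

import (
	"time"
	"vm-server/models"

	"github.com/cilium/ebpf"
)

// Hypothetical replacement for the PerformScan stub.
func (s *ConsumerScanner) PerformScan(job *models.ConsumerJob, req *models.ConsumerRequest) {
	job.Status = "running"
	job.UpdatedAt = time.Now()
	_ = s.Service.UpdateConsumerJob(job)

	// Load a pre-compiled eBPF object; attaching programs and reading events
	// from its maps is left out of this sketch.
	coll, err := ebpf.LoadCollection("probe.o")
	if err != nil {
		job.Status = "failed"
		job.UpdatedAt = time.Now()
		_ = s.Service.UpdateConsumerJob(job)
		return
	}
	defer coll.Close()

	// ... filter probe events against req.Paths and append them to job.Match
	// as models.ConsumerMatchResumed values ...

	job.Status = "done"
	job.FinishedAt = time.Now()
	job.UpdatedAt = time.Now()
	_ = s.Service.UpdateConsumerJob(job)
}
```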
diff --git a/models/consumer.go b/models/consumer.go
new file mode 100644
index 0000000..4a8f3b2
--- /dev/null
+++ b/models/consumer.go
@@ -0,0 +1,61 @@
+package models
+
+import (
+	"database/sql/driver"
+	"encoding/json"
+	"errors"
+	"time"
+
+	"gorm.io/gorm"
+)
+
+type ConsumerEntry struct {
+	gorm.Model
+	EntryID        string `json:"entryId" gorm:"unique;index"`
+	Hostname       string `json:"hostname"`
+	Executable     string `json:"executable"`
+	User           string `json:"user"`
+	PID            string `json:"pid"`
+	StartTimestamp string `json:"startTimestamp"`
+	ConsumerJobID  uint   `json:"consumerJobId"` // Foreign key for ConsumerJob
+}
+
+type ConsumerJob struct {
+	gorm.Model
+	JobID      string             `json:"jobId" gorm:"unique;index"`
+	Status     string             `json:"status"`
+	CreatedAt  time.Time          `json:"createdAt"`
+	UpdatedAt  time.Time          `json:"updatedAt"`
+	FinishedAt time.Time          `json:"finishedAt"`
+	Match      ConsumerSliceMatch `json:"match" gorm:"type:text"` // Stored as a JSON string via Value/Scan
+}
+
+type ConsumerSliceMatch []ConsumerMatchResumed
+type ConsumerMatchResumed struct {
+	Hostname       string `json:"hostname"`
+	Executable     string `json:"executable"`
+	User           string `json:"user"`
+	PID            string `json:"pid"`
+	StartTimestamp string `json:"startTimestamp"`
+}
+
+// Value implements the driver.Valuer interface, converting the slice to a JSON string.
+func (s ConsumerSliceMatch) Value() (driver.Value, error) {
+	if len(s) == 0 {
+		return nil, nil
+	}
+	return json.Marshal(s)
+}
+
+// Scan implements the sql.Scanner interface, converting the JSON string from the database to a slice.
+func (s *ConsumerSliceMatch) Scan(value interface{}) error {
+	if value == nil {
+		*s = nil
+		return nil
+	}
+	bytes, ok := value.([]byte)
+	if !ok {
+		return errors.New("type assertion to []byte failed")
+	}
+	return json.Unmarshal(bytes, s)
+}
diff --git a/models/consumer_request.go b/models/consumer_request.go
new file mode 100644
index 0000000..081d61b
--- /dev/null
+++ b/models/consumer_request.go
@@ -0,0 +1,18 @@
+package models
+
+type ConsumerRequest struct {
+	Location ConsumerLocation `json:"location"`
+	Paths    []string         `json:"paths"`
+}
+
+type ConsumerLocation struct {
+	Kind       string            `json:"kind"`
+	Name       string            `json:"name"`
+	APIVersion string            `json:"apiVersion"`
+	RemoteRef  ConsumerRemoteRef `json:"remoteRef"`
+}
+
+type ConsumerRemoteRef struct {
+	Key      string `json:"key"`
+	Property string `json:"property"`
+}
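For reference, the request body that `ConsumerHandler` binds into `ConsumerRequest` would look roughly as follows, assuming the `RemoteRef` field uses the `ConsumerRemoteRef` type defined in this file; the concrete values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
	"vm-server/models"
)

func main() {
	// Example values only; the handler fills this via c.Bind(&req).
	req := models.ConsumerRequest{
		Location: models.ConsumerLocation{
			Kind:       "Secret",
			Name:       "db-credentials",
			APIVersion: "v1",
			RemoteRef:  models.ConsumerRemoteRef{Key: "prod/db", Property: "password"},
		},
		Paths: []string{"/etc/app/config.yaml"},
	}

	body, _ := json.MarshalIndent(req, "", "  ")
	// Prints a JSON document with "location" (kind, name, apiVersion,
	// remoteRef{key, property}) and "paths".
	fmt.Println(string(body))
}
```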
diff --git a/models/consumer_response.go b/models/consumer_response.go
new file mode 100644
index 0000000..53ae545
--- /dev/null
+++ b/models/consumer_response.go
@@ -0,0 +1,24 @@
+package models
+
+import "time"
+
+type ConsumerResponse struct {
+	ID string `json:"id"`
+}
+
+type ConsumerIdResponse struct {
+	ID         string     `json:"id"`
+	Status     ScanStatus `json:"status"`
+	CreatedAt  time.Time  `json:"createdAt"`
+	UpdatedAt  time.Time  `json:"updatedAt"`
+	FinishedAt time.Time  `json:"finishedAt"`
+	Consumers  []Consumer `json:"consumers"`
+}
+
+type Consumer struct {
+	Hostname       string `json:"hostname"`
+	Executable     string `json:"executable"`
+	User           string `json:"user"`
+	PID            string `json:"pid"`
+	StartTimestamp string `json:"startTimestamp"`
+}
diff --git a/services/consumer/consumers.go b/services/consumer/consumers.go
new file mode 100644
index 0000000..360e6af
--- /dev/null
+++ b/services/consumer/consumers.go
@@ -0,0 +1,57 @@
+package consumer
+
+import (
+	"vm-server/models"
+	"vm-server/store/schema"
+)
+
+// Service is the business logic layer for consumers.
+type Service struct {
+	Store schema.Store
+}
+
+// NewService creates a new consumer service.
+func NewService(s schema.Store) *Service {
+	return &Service{Store: s}
+}
+
+// CreateConsumerJob creates a new consumer job.
+func (s *Service) CreateConsumerJob(consumerJob *models.ConsumerJob) error {
+	// Business logic for creating a job can go here.
+	return s.Store.CreateConsumerJob(consumerJob)
+}
+
+// GetConsumerJob retrieves a consumer job by its ID.
+func (s *Service) GetConsumerJob(id string) (*models.ConsumerJob, error) {
+	return s.Store.GetConsumerJob(id)
+}
+
+// CreateConsumerEntry creates a new consumer entry.
+func (s *Service) CreateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	// Business logic for creating an entry can go here.
+	return s.Store.CreateConsumerEntry(consumerEntry)
+}
+
+// GetConsumerEntry retrieves a consumer entry by its EntryID.
+func (s *Service) GetConsumerEntry(entryID string) (*models.ConsumerEntry, error) {
+	return s.Store.GetConsumerEntry(entryID)
+}
+
+// UpdateConsumerEntry updates an existing consumer entry.
+func (s *Service) UpdateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	return s.Store.UpdateConsumerEntry(consumerEntry)
+}
+
+// UpdateConsumerJob updates an existing consumer job.
+func (s *Service) UpdateConsumerJob(consumerJob *models.ConsumerJob) error {
+	// Business logic for updating a job can go here.
+	return s.Store.UpdateConsumerJob(consumerJob)
+}
+
+func (s *Service) DeleteConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	return s.Store.DeleteConsumerEntry(consumerEntry)
+}
+
+func (s *Service) ListConsumerEntries() ([]models.ConsumerEntry, error) {
+	return s.Store.ListConsumerEntries()
+}
diff --git a/services/secrets/secrets.go b/services/secrets/secrets.go
index e3bd6bb..02502f7 100644
--- a/services/secrets/secrets.go
+++ b/services/secrets/secrets.go
@@ -1,7 +1,6 @@
 package secrets
 
 import (
-	"fmt"
 	"os"
 	"vm-server/models"
 	"vm-server/services/scan"
@@ -25,15 +24,10 @@ func (s *Service) UpdateVersion(newValue []byte, entry *models.ScanEntry) error
 		return err
 	}
 	newFileLength := len(file) - (entry.EndLine - entry.StartLine + 1) + len(newValue)
-	fmt.Printf("old file length: %v\nold Entry Len: %v\n, new Entry Len: %v\n, new file len: %v\n", len(file), entry.EndLine-entry.StartLine+1, len(newValue), newFileLength)
 	modFile := make([]byte, newFileLength)
-	fmt.Printf("modifying %v with %v", string(file[entry.StartLine:entry.EndLine]), string(newValue))
 	copy(modFile[:entry.StartLine], file[:entry.StartLine])
-	fmt.Printf("New modFile before start: %v\n", string(modFile[:entry.StartLine]))
 	copy(modFile[entry.StartLine:entry.StartLine+len(newValue)], newValue)
-	fmt.Printf("New modFile up until beginning: %v\n", string(modFile[:entry.StartLine+len(newValue)]))
 	copy(modFile[entry.StartLine+len(newValue):], file[entry.EndLine+1:])
-	fmt.Printf("New modFile after end: %v\n", string(modFile))
 
 	err = os.WriteFile(entry.FilePath, modFile, 0644)
 	if err != nil {
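The `UpdateVersion` splice that loses its debug output above operates on byte offsets (despite the `StartLine`/`EndLine` names). A standalone sketch of the same arithmetic on a toy buffer, with made-up offsets:

```go
package main

import "fmt"

func main() {
	// file[start:end] (end inclusive) is replaced by newValue,
	// mirroring UpdateVersion's three copy calls.
	file := []byte("abcdef")
	start, end := 2, 3 // replace "cd"
	newValue := []byte("XYZ")

	newLen := len(file) - (end - start + 1) + len(newValue)
	mod := make([]byte, newLen)
	copy(mod[:start], file[:start])
	copy(mod[start:start+len(newValue)], newValue)
	copy(mod[start+len(newValue):], file[end+1:])

	fmt.Println(string(mod)) // abXYZef
}
```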
diff --git a/store/gorm/store.go b/store/gorm/store.go
index 3b7af10..77919e5 100644
--- a/store/gorm/store.go
+++ b/store/gorm/store.go
@@ -27,7 +27,7 @@ func NewStore() (schema.Store, error) {
 	log.Println("Database connection established.")
 
 	// Auto-migrate the schema for ScanJob and ScanEntry models.
-	err = db.AutoMigrate(&models.ScanJob{}, &models.ScanEntry{})
+	err = db.AutoMigrate(&models.ScanJob{}, &models.ScanEntry{}, &models.ConsumerJob{}, &models.ConsumerEntry{})
 	if err != nil {
 		// Attempt to close the database connection if migration fails.
 		sqlDB, _ := db.DB()
@@ -113,3 +113,71 @@ func (s *StoreGorm) ListScanEntries() ([]models.ScanEntry, error) {
 	result := s.DB.Find(&scanEntries)
 	return scanEntries, result.Error
 }
+
+// Consumer Job
+func (s *StoreGorm) CreateConsumerJob(consumerJob *models.ConsumerJob) error {
+	result := s.DB.Create(consumerJob)
+	return result.Error
+}
+
+func (s *StoreGorm) GetConsumerJob(id string) (*models.ConsumerJob, error) {
+	var consumerJob models.ConsumerJob
+	result := s.DB.First(&consumerJob, "job_id = ?", id)
+	if result.Error != nil {
+		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
+			return nil, &models.NotFoundErr{}
+		}
+		return nil, result.Error
+	}
+	return &consumerJob, nil
+}
+
+func (s *StoreGorm) UpdateConsumerJob(consumerJob *models.ConsumerJob) error {
+	result := s.DB.Save(consumerJob)
+	return result.Error
+}
+
+func (s *StoreGorm) DeleteConsumerJob(consumerJob *models.ConsumerJob) error {
+	result := s.DB.Delete(consumerJob)
+	return result.Error
+}
+
+func (s *StoreGorm) ListConsumerJobs() ([]models.ConsumerJob, error) {
+	var consumerJobs []models.ConsumerJob
+	result := s.DB.Find(&consumerJobs)
+	return consumerJobs, result.Error
+}
+
+// Consumer Entry
+func (s *StoreGorm) CreateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	result := s.DB.Create(consumerEntry)
+	return result.Error
+}
+
+func (s *StoreGorm) GetConsumerEntry(entryID string) (*models.ConsumerEntry, error) {
+	var consumerEntry models.ConsumerEntry
+	result := s.DB.First(&consumerEntry, "entry_id = ?", entryID)
+	if result.Error != nil {
+		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
+			return nil, &models.NotFoundErr{}
+		}
+		return nil, result.Error
+	}
+	return &consumerEntry, nil
+}
+
+func (s *StoreGorm) UpdateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	result := s.DB.Save(consumerEntry)
+	return result.Error
+}
+
+func (s *StoreGorm) DeleteConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	result := s.DB.Delete(consumerEntry)
+	return result.Error
+}
+
+func (s *StoreGorm) ListConsumerEntries() ([]models.ConsumerEntry, error) {
+	var consumerEntries []models.ConsumerEntry
+	result := s.DB.Find(&consumerEntries)
+	return consumerEntries, result.Error
+}
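The GORM store persists `ConsumerJob.Match` through the `Value`/`Scan` methods defined in `models/consumer.go` (the column is declared `gorm:"type:text"`). A small round-trip sketch of what GORM does under the hood; the match values are invented:

```go
package main

import (
	"fmt"
	"vm-server/models"
)

func main() {
	match := models.ConsumerSliceMatch{
		{Hostname: "node-1", Executable: "/usr/bin/python3", User: "app", PID: "4242", StartTimestamp: "2024-01-01T00:00:00Z"},
	}

	// Value() is what GORM calls when writing the field to the text column.
	v, err := match.Value()
	if err != nil {
		panic(err)
	}
	fmt.Printf("stored as: %s\n", v)

	// Scan() is what GORM calls when reading the column back.
	var decoded models.ConsumerSliceMatch
	if err := decoded.Scan(v); err != nil {
		panic(err)
	}
	fmt.Printf("decoded: %+v\n", decoded)
}
```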
diff --git a/store/memory/store.go b/store/memory/store.go
index 0b1098a..9875631 100644
--- a/store/memory/store.go
+++ b/store/memory/store.go
@@ -11,6 +11,8 @@ import (
 
 const MAX_SCAN_JOB_SIZE = 1000
 const MAX_SCAN_ENTRY_SIZE = 10000
+const MAX_CONSUMER_JOB_SIZE = 1000
+const MAX_CONSUMER_ENTRY_SIZE = 10000
 
 const CLEANUP_TO = 0.8
 
@@ -21,6 +23,10 @@ type StoreMemory struct {
 	scanJobs               map[uuid.UUID]*models.ScanJob
 	scanEntries            map[uuid.UUID]*models.ScanEntry
 	scanEntriesFingerprint map[string]uuid.UUID
+	consumerJobAge         map[uuid.UUID]time.Time
+	consumerJobs           map[uuid.UUID]*models.ConsumerJob
+	consumerEntryAge       map[uuid.UUID]time.Time
+	consumerEntries        map[uuid.UUID]*models.ConsumerEntry
 	mu                     sync.RWMutex
 }
 
@@ -32,6 +38,10 @@ func NewStore() (schema.Store, error) {
 		scanEntriesFingerprint: make(map[string]uuid.UUID),
 		scanJobAge:             make(map[uuid.UUID]time.Time),
 		scanEntryAge:           make(map[uuid.UUID]time.Time),
+		consumerJobAge:         make(map[uuid.UUID]time.Time),
+		consumerJobs:           make(map[uuid.UUID]*models.ConsumerJob),
+		consumerEntryAge:       make(map[uuid.UUID]time.Time),
+		consumerEntries:        make(map[uuid.UUID]*models.ConsumerEntry),
 	}, nil
 }
 
@@ -243,3 +253,197 @@ func (s *StoreMemory) ListScanEntries() ([]models.ScanEntry, error) {
 	}
 	return scanEntries, nil
 }
+
+// Consumer Job
+func (s *StoreMemory) CreateConsumerJob(consumerJob *models.ConsumerJob) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	if len(s.consumerJobs) >= MAX_CONSUMER_JOB_SIZE {
+		s.cleanupConsumerJobs()
+	}
+	uuid, err := uuid.Parse(consumerJob.JobID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerJobs[uuid]; ok {
+		return &models.AlreadyExistErr{}
+	}
+	s.consumerJobs[uuid] = consumerJob
+	s.consumerJobAge[uuid] = time.Now()
+	return nil
+}
+
+func (s *StoreMemory) GetConsumerJob(id string) (*models.ConsumerJob, error) {
+	s.mu.RLock()
+	uuid, err := uuid.Parse(id)
+	if err != nil {
+		s.mu.RUnlock()
+		return nil, err
+	}
+	consumerJob, ok := s.consumerJobs[uuid]
+	if !ok {
+		s.mu.RUnlock()
+		return nil, &models.NotFoundErr{}
+	}
+	s.mu.RUnlock()
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	s.consumerJobAge[uuid] = time.Now()
+	return consumerJob, nil
+}
+
+func (s *StoreMemory) UpdateConsumerJob(consumerJob *models.ConsumerJob) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	uuid, err := uuid.Parse(consumerJob.JobID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerJobs[uuid]; !ok {
+		return &models.NotFoundErr{}
+	}
+	s.consumerJobs[uuid] = consumerJob
+	s.consumerJobAge[uuid] = time.Now()
+	return nil
+}
+
+func (s *StoreMemory) DeleteConsumerJob(consumerJob *models.ConsumerJob) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	uuid, err := uuid.Parse(consumerJob.JobID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerJobs[uuid]; !ok {
+		// Already deleted, operation ok
+		return nil
+	}
+	delete(s.consumerJobs, uuid)
+	delete(s.consumerJobAge, uuid)
+	return nil
+}
+
+// Consumer Entry
+func (s *StoreMemory) CreateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	if len(s.consumerEntries) >= MAX_CONSUMER_ENTRY_SIZE {
+		s.cleanupConsumerEntries()
+	}
+	uuid, err := uuid.Parse(consumerEntry.EntryID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerEntries[uuid]; ok {
+		return &models.AlreadyExistErr{}
+	}
+	s.consumerEntries[uuid] = consumerEntry
+	s.consumerEntryAge[uuid] = time.Now()
+	return nil
+}
+func (s *StoreMemory) cleanupConsumerEntries() {
+	targetSize := int(MAX_CONSUMER_ENTRY_SIZE * CLEANUP_TO)
+	for len(s.consumerEntries) > targetSize {
+		var oldestID uuid.UUID
+		var oldestTime time.Time
+		first := true
+		for id, t := range s.consumerEntryAge {
+			if first {
+				oldestTime = t
+				oldestID = id
+				first = false
+				continue
+			}
+			if t.Before(oldestTime) {
+				oldestTime = t
+				oldestID = id
+			}
+		}
+		delete(s.consumerEntries, oldestID)
+		delete(s.consumerEntryAge, oldestID)
+	}
+}
+
+func (s *StoreMemory) cleanupConsumerJobs() {
+	targetSize := int(MAX_CONSUMER_JOB_SIZE * CLEANUP_TO)
+	for len(s.consumerJobs) > targetSize {
+		var oldestID uuid.UUID
+		var oldestTime time.Time
+		first := true
+		for id, t := range s.consumerJobAge {
+			if first {
+				oldestTime = t
+				oldestID = id
+				first = false
+				continue
+			}
+			if t.Before(oldestTime) {
+				oldestTime = t
+				oldestID = id
+			}
+		}
+		delete(s.consumerJobs, oldestID)
+		delete(s.consumerJobAge, oldestID)
+	}
+}
+func (s *StoreMemory) GetConsumerEntry(entryID string) (*models.ConsumerEntry, error) {
+	s.mu.RLock()
+	uuid, err := uuid.Parse(entryID)
+	if err != nil {
+		s.mu.RUnlock()
+		return nil, err
+	}
+	consumerEntry, ok := s.consumerEntries[uuid]
+	if !ok {
+		s.mu.RUnlock()
+		return nil, &models.NotFoundErr{}
+	}
+	s.mu.RUnlock()
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	s.consumerEntryAge[uuid] = time.Now()
+	return consumerEntry, nil
+}
+
+func (s *StoreMemory) UpdateConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	uuid, err := uuid.Parse(consumerEntry.EntryID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerEntries[uuid]; !ok {
+		return &models.NotFoundErr{}
+	}
+	s.consumerEntries[uuid] = consumerEntry
+	s.consumerEntryAge[uuid] = time.Now()
+	return nil
+}
+
+func (s *StoreMemory) DeleteConsumerEntry(consumerEntry *models.ConsumerEntry) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	uuid, err := uuid.Parse(consumerEntry.EntryID)
+	if err != nil {
+		return err
+	}
+	if _, ok := s.consumerEntries[uuid]; !ok {
+		// Already deleted, operation ok
+		return nil
+	}
+	delete(s.consumerEntries, uuid)
+	delete(s.consumerEntryAge, uuid)
+	return nil
+}
+
+func (s *StoreMemory) ListConsumerEntries() ([]models.ConsumerEntry, error) {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	var consumerEntries []models.ConsumerEntry
+	for _, consumerEntry := range s.consumerEntries {
+		consumerEntries = append(consumerEntries, *consumerEntry)
+	}
+	return consumerEntries, nil
+}
diff --git a/store/schema/schema.go b/store/schema/schema.go
index 57129d7..068b9dd 100644
--- a/store/schema/schema.go
+++ b/store/schema/schema.go
@@ -7,13 +7,25 @@ type StoreProvider interface {
 }
 
 type Store interface {
+	// Scan Jobs
 	CreateScanJob(scanJob *models.ScanJob) error
 	GetScanJob(id string) (*models.ScanJob, error)
+	UpdateScanJob(scanJob *models.ScanJob) error
+	// Scan Entry
 	CreateScanEntry(scanEntry *models.ScanEntry) error
 	GetScanEntry(entryID string) (*models.ScanEntry, error)
 	GetScanEntryByFingerprint(fingerprint string) (*models.ScanEntry, error)
-	UpdateScanJob(scanJob *models.ScanJob) error
 	UpdateScanEntry(scanEntry *models.ScanEntry) error
 	DeleteScanEntry(scanEntry *models.ScanEntry) error
 	ListScanEntries() ([]models.ScanEntry, error)
+	// Consumer Jobs
+	GetConsumerJob(id string) (*models.ConsumerJob, error)
+	CreateConsumerJob(consumerJob *models.ConsumerJob) error
+	UpdateConsumerJob(consumerJob *models.ConsumerJob) error
+	// Consumer Entry
+	GetConsumerEntry(entryID string) (*models.ConsumerEntry, error)
+	CreateConsumerEntry(consumerEntry *models.ConsumerEntry) error
+	UpdateConsumerEntry(consumerEntry *models.ConsumerEntry) error
+	DeleteConsumerEntry(consumerEntry *models.ConsumerEntry) error
+	ListConsumerEntries() ([]models.ConsumerEntry, error)
 }
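A quick sketch of exercising the new consumer-job methods against the in-memory store, useful as a starting point for a table test. Note that `StoreMemory` keys jobs by parsing `JobID` as a UUID, so the ID must be a valid UUID string; the test name and status values are placeholders:

```go
package memory

import (
	"testing"
	"vm-server/models"

	"github.com/google/uuid"
)

func TestConsumerJobLifecycle(t *testing.T) {
	store, err := NewStore()
	if err != nil {
		t.Fatalf("NewStore: %v", err)
	}

	// Create a job keyed by a valid UUID string.
	job := &models.ConsumerJob{JobID: uuid.New().String(), Status: "pending"}
	if err := store.CreateConsumerJob(job); err != nil {
		t.Fatalf("CreateConsumerJob: %v", err)
	}

	// Read it back and check the stored status.
	got, err := store.GetConsumerJob(job.JobID)
	if err != nil {
		t.Fatalf("GetConsumerJob: %v", err)
	}
	if got.Status != "pending" {
		t.Fatalf("unexpected status: %q", got.Status)
	}

	// Update the status in place.
	job.Status = "done"
	if err := store.UpdateConsumerJob(job); err != nil {
		t.Fatalf("UpdateConsumerJob: %v", err)
	}
}
```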