diff --git a/.gitea/workflows/release.yaml b/.gitea/workflows/release.yaml new file mode 100644 index 0000000..6ab7fac --- /dev/null +++ b/.gitea/workflows/release.yaml @@ -0,0 +1,53 @@ +name: Build and Release Core + +on: + push: + tags: + - 'v*' # Only trigger when you push a version tag (e.g., v1.0.0) + pull_request: + branches: [ main ] # Run tests on PRs, but don't build release binaries + +jobs: + build: + name: Test and Build + runs-on: ubuntu-latest # This runs on your Gitea act_runner + steps: + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: '1.26' # Update to match your go.mod if different + + - name: Run Tests + run: go test ./... -v + + - name: Build Binaries + # Only run the build steps if this was triggered by a tag push + # NOTE(review): mattn/go-sqlite3 requires cgo; without cross C toolchains the darwin/windows builds below fall back to CGO_ENABLED=0 and produce binaries whose sqlite driver fails at runtime — confirm before relying on these artifacts + if: startsWith(github.ref, 'refs/tags/') + run: | + + VERSION=${GITHUB_REF_NAME} + COMMIT=$(echo ${GITHUB_SHA} | cut -c1-7) + + + LDFLAGS="-X 'main.BuildVersion=$VERSION' -X 'main.BuildCommit=$COMMIT'" + + mkdir -p bin + + echo "Building Linux (amd64)..." + GOOS=linux GOARCH=amd64 go build -ldflags="$LDFLAGS" -o bin/rr-linux-amd64 ./cmd/rr/main.go + + echo "Building macOS (Apple Silicon arm64)..." + GOOS=darwin GOARCH=arm64 go build -ldflags="$LDFLAGS" -o bin/rr-darwin-arm64 ./cmd/rr/main.go + + echo "Building Windows (amd64)..." 
+ GOOS=windows GOARCH=amd64 go build -ldflags="$LDFLAGS" -o bin/rr-windows-amd64.exe ./cmd/rr/main.go + + - name: Upload Artifacts + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@v4 + with: + name: riskrancher-core-binaries + path: bin/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index 5b90e79..a57b3bb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,27 +1,59 @@ -# ---> Go -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins +# ========================= +# Go Standard +# ========================= *.exe *.exe~ *.dll *.so *.dylib - -# Test binary, built with `go test -c` *.test - -# Output of the go coverage tool, specifically when used with LiteIDE *.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file go.work go.work.sum +vendor/ -# env file +# ========================= +# RiskRancher Compiled Binaries +# ========================= +# Ignore the local builds so you don't accidentally push a 20MB executable +rr +rr.exe + +# ========================= +# Runtime Data & Uploads (CRITICAL) +# ========================= +# Ignore all runtime databases and uploaded pentest reports/images +/data/* +!/data/.keep +/backups/* +!/backups/.keep + +# SQLite temporary and journal files +*.db +*.db-shm +*.db-wal +*.sqlite +*.sqlite3 + +# ========================= +# Environment & Secrets +# ========================= .env +.env.* +!.env.example +# ========================= +# IDEs & OS Files +# ========================= +# GoLand / IntelliJ +.idea/ +*.iml + +# macOS +.DS_Store +.AppleDouble +.LSOverride + +# Windows +Thumbs.db +ehthumbs.db \ No newline at end of file diff --git a/README.md b/README.md index c9650d6..e77de0f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,24 @@ -# core +# ๐Ÿด RiskRancher Core (Community 
Edition) -Open-source Risk-Based Vulnerability Management (RBVM). A high-performance, air-gapped single binary for finding ingestion, deduplication, and triage. \ No newline at end of file +> So simple your manager could deploy it. + +RiskRancher Core is an open-source **Risk-Based Vulnerability Management (RBVM)** and **ASPM** platform built for modern DevSecOps teams. Compiled as a lightning-fast, **air-gapped single Go binary** with an embedded SQLite database, it ingests, deduplicates, and routes millions of security findings from your CI/CD pipelines and scanners. + +No external databases to spin up, no Docker swarms to manage, and zero complex microservices. Just drop the binary on a server and start triaging. + +## ๐Ÿš€ Getting Started + +### Option A: Download the Binary (Recommended) +1. Go to the [Releases](#) tab and download the compiled executable for your OS (Windows/macOS/Linux). +2. Place the binary in a dedicated directory and execute it. +3. Visit `http://localhost:8080` in your browser. + +### Option B: Compile from Source +Ensure you have **Go 1.26+** installed (*CGO is required for the native `mattn/go-sqlite3` driver*). 
+ +```bash +git clone https://gitea.yourdomain.com/RiskRancher/core.git +cd core +go build -o rr ./cmd/rr/main.go +./rr +``` \ No newline at end of file diff --git a/backups/.keep b/backups/.keep new file mode 100644 index 0000000..e69de29 diff --git a/cmd/stresstest/main.go b/cmd/stresstest/main.go new file mode 100644 index 0000000..44cc01b --- /dev/null +++ b/cmd/stresstest/main.go @@ -0,0 +1,166 @@ +package main + +import ( + "bytes" + "database/sql" + "encoding/json" + "flag" + "fmt" + "log" + "net/http" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/auth" + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" +) + +func main() { + sizeFlag := flag.String("size", "small", "Choose 'small' (100 tickets) or 'large' (300,000 tickets)") + flag.Parse() + + totalTickets := 100 + batchSize := 100 + + if *sizeFlag == "large" { + totalTickets = 300000 + batchSize = 10000 // Ingest in chunks of 10k + } + + db := datastore.InitDB("./data/RiskRancher.db") + defer db.Close() + + log.Printf("🧹 Sweeping the ranch (Deleting old test data)...") + + db.Exec("DELETE FROM ticket_assignments") + db.Exec("DELETE FROM tickets") + db.Exec("DELETE FROM sync_logs") + db.Exec("DELETE FROM draft_tickets") + + // Reset the auto-increment counters so Ticket IDs reliably start at 1 + db.Exec("DELETE FROM sqlite_sequence") + + log.Println("⚙️ Seeding global config, adapters, and SLA matrix...") + + db.Exec("INSERT OR IGNORE INTO app_config (id, timezone, business_start, business_end, default_extension_days) VALUES (1, 'America/New_York', 9, 17, 30)") + db.Exec("INSERT OR IGNORE INTO domains (name) VALUES ('Vulnerability'), ('Privacy'), ('Compliance'), ('Incident')") + db.Exec("INSERT OR IGNORE INTO departments (name) VALUES ('Security'), ('IT'), ('Privacy'), ('Legal'), ('Compliance')") + + slaQuery := `INSERT OR IGNORE INTO sla_policies (domain, severity, days_to_triage, days_to_remediate, max_extensions) VALUES + 
('Vulnerability', 'Critical', 1, 3, 0), ('Vulnerability', 'High', 3, 14, 1), ('Vulnerability', 'Medium', 5, 30, 2), ('Vulnerability', 'Low', 8, 90, 3), ('Vulnerability', 'Info', 0, 0, 0)` + db.Exec(slaQuery) + + adapterQuery := `INSERT OR IGNORE INTO data_adapters (name, source_name, findings_path, mapping_title, mapping_asset, mapping_severity) VALUES + ('Trivy Container Security', 'Trivy', '.', 'title', 'asset', 'severity'), + ('GitHub Dependabot', 'Dependabot', '.', 'title', 'asset', 'severity'), + ('Tenable Nessus', 'Nessus', '.', 'title', 'asset', 'severity'), + ('Manual Entry API', 'Manual', '.', 'title', 'asset', 'severity')` + db.Exec(adapterQuery) + + validHash, _ := auth.HashPassword("password123") + + _, err := db.Exec("INSERT OR REPLACE INTO users (id, email, full_name, password_hash, global_role, is_active) VALUES (999, 'stress@ranch.com', 'Stress Tester', ?, 'Sheriff', 1)", validHash) + if err != nil { + log.Fatalf("๐Ÿšจ Failed to seed Stress User (Database locked?): %v", err) + } + + _, err = db.Exec("INSERT OR REPLACE INTO sessions (session_token, user_id, expires_at) VALUES ('stress_token_123', 999, datetime('now', '+1 hour'))") + if err != nil { + log.Fatalf("๐Ÿšจ Failed to seed Stress Session: %v", err) + } + + log.Println("==========================================================================") + log.Printf("๐Ÿš€ COMMENCING %d TICKET API LOAD TEST (%s mode)", totalTickets, *sizeFlag) + log.Println("โš ๏ธ CRITICAL: Ensure your RiskRancher server is running in another terminal!") + log.Println("==========================================================================") + time.Sleep(1 * time.Second) + + client := &http.Client{Timeout: 5 * time.Minute} + baseURL := "http://localhost:8080" + sessionCookie := &http.Cookie{Name: "session_token", Value: "stress_token_123"} + + ticketCounter := 1 + + log.Printf("๐Ÿ“ฅ PHASE 1: Ingesting via API in batches of %d...", batchSize) + for b := 0; b < totalTickets/batchSize; b++ { + var payload 
[]map[string]string + + for i := 0; i < batchSize; i++ { + assetName := fmt.Sprintf("server-prod-%05d", (ticketCounter%50)+1) + + sev := "Medium" + if ticketCounter%10 == 0 { + sev = "Critical" + } else if ticketCounter%5 == 0 { + sev = "High" + } else if ticketCounter%2 == 0 { + sev = "Low" + } + + source := "Trivy" + if ticketCounter%3 == 0 { + source = "Dependabot" + } else if ticketCounter%7 == 0 { + source = "Nessus" + } + + payload = append(payload, map[string]string{ + "source": source, + "asset_identifier": assetName, + "title": fmt.Sprintf("Vulnerability-%06d", ticketCounter), + "severity": sev, + "description": fmt.Sprintf("Stress test vulnerability payload #%d", ticketCounter), + }) + ticketCounter++ + } + + body, _ := json.Marshal(payload) + req, _ := http.NewRequest(http.MethodPost, baseURL+"/api/ingest", bytes.NewBuffer(body)) + req.AddCookie(sessionCookie) + req.Header.Set("Content-Type", "application/json") + + resp, err := client.Do(req) + if err != nil { + log.Fatalf("๐Ÿšจ API Request failed: %v", err) + } + if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK { + log.Fatalf("๐Ÿšจ API returned unexpected status: %d", resp.StatusCode) + } + resp.Body.Close() + fmt.Printf("โœ… Ingested batch %d/%d (%d tickets)\n", b+1, totalTickets/batchSize, len(payload)) + } + + log.Println("\n๐Ÿ”€ PHASE 2: Distributing tickets to valid Core workflows...") + + unassignedEnd := int(float64(totalTickets) * 0.60) // 60% stay in Holding Pen + assignedEnd := int(float64(totalTickets) * 0.75) // 15% go to Chute + returnedEnd := int(float64(totalTickets) * 0.85) // 10% Returned to Security + falsePosEnd := int(float64(totalTickets) * 0.90) // 5% False Positive + patchedEnd := totalTickets // 10% Patched + + log.Printf("โณ Keeping Tickets 1 - %d in the Holding Pen (Unassigned)...", unassignedEnd) + bulkUpdateDB(db, unassignedEnd+1, assignedEnd, "Assigned Out", "it-network@ranch.com") + bulkUpdateDB(db, assignedEnd+1, returnedEnd, "Returned to 
Security", "it-endpoint@ranch.com") + bulkUpdateDB(db, returnedEnd+1, falsePosEnd, "False Positive", "security@ranch.com") + bulkUpdateDB(db, falsePosEnd+1, patchedEnd, "Patched", "it-network@ranch.com") + + log.Println("\n๐ŸŽ‰ STRESS TEST COMPLETE!") + log.Println("==========================================================================") + log.Println("๐Ÿค  The ranch is fully loaded with Core data. Go check the Dashboard!") + log.Println("๐Ÿ”‘ Login -> Email: stress@ranch.com | Password: password123") + log.Println("==========================================================================") +} + +// bulkUpdateDB executes direct SQLite updates +func bulkUpdateDB(db *sql.DB, startID, endID int, status, assignee string) { + if startID > endID { + return + } + fmt.Printf("Moving %d tickets (%d to %d) -> %s...\n", (endID-startID)+1, startID, endID, status) + + query := `UPDATE tickets SET status = ?, assignee = ?, latest_comment = 'Stress test auto-distribution', updated_at = CURRENT_TIMESTAMP WHERE id >= ? 
AND id <= ?` + + _, err := db.Exec(query, status, assignee, startID, endID) + if err != nil { + log.Fatalf("๐Ÿšจ DB update failed: %v", err) + } +} diff --git a/data/.keep b/data/.keep new file mode 100644 index 0000000..e69de29 diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..f9d9566 --- /dev/null +++ b/go.mod @@ -0,0 +1,8 @@ +module epigas.gitea.cloud/RiskRancher/core + +go 1.26.0 + +require ( + github.com/mattn/go-sqlite3 v1.14.34 + golang.org/x/crypto v0.48.0 +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..70d63c0 --- /dev/null +++ b/go.sum @@ -0,0 +1,4 @@ +github.com/mattn/go-sqlite3 v1.14.34 h1:3NtcvcUnFBPsuRcno8pUtupspG/GM+9nZ88zgJcp6Zk= +github.com/mattn/go-sqlite3 v1.14.34/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= diff --git a/pkg/adapters/adapters.go b/pkg/adapters/adapters.go new file mode 100644 index 0000000..9ce4c76 --- /dev/null +++ b/pkg/adapters/adapters.go @@ -0,0 +1,147 @@ +package adapters + +import ( + "crypto/sha256" + "encoding/hex" + "encoding/json" + "log" + "net/http" + "strconv" + "strings" + + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (h *Handler) HandleGetAdapters(w http.ResponseWriter, r *http.Request) { + adapters, err := h.Store.GetAdapters(r.Context()) + if err != nil { + http.Error(w, "Database error", http.StatusInternalServerError) + return + } + json.NewEncoder(w).Encode(adapters) +} + +func (h *Handler) HandleCreateAdapter(w http.ResponseWriter, r *http.Request) { + var adapter domain2.Adapter + if err := json.NewDecoder(r.Body).Decode(&adapter); err != nil { + http.Error(w, "Invalid JSON", http.StatusBadRequest) + return + } + if err := h.Store.SaveAdapter(r.Context(), adapter); err != nil { + http.Error(w, "Failed to save adapter", http.StatusInternalServerError) + return + } + 
w.WriteHeader(http.StatusCreated) +} + +func (h *Handler) HandleDeleteAdapter(w http.ResponseWriter, r *http.Request) { + idStr := r.PathValue("id") + id, err := strconv.Atoi(idStr) + if err != nil { + http.Error(w, "Invalid adapter ID", http.StatusBadRequest) + return + } + + if err := h.Store.DeleteAdapter(r.Context(), id); err != nil { + http.Error(w, "Failed to delete adapter", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +func getJSONValue(data interface{}, path string) interface{} { + if path == "" || path == "." { + return data // The root IS the array + } + keys := strings.Split(path, ".") + current := data + for _, key := range keys { + if m, ok := current.(map[string]interface{}); ok { + current = m[key] + } else { + return nil // Path broke + } + } + return current +} + +func interfaceToString(val interface{}) string { + if val == nil { + return "" + } + if str, ok := val.(string); ok { + return str + } + return "" // Could expand this to handle ints/floats if needed +} + +// HandleAdapterIngest dynamically maps deeply nested JSON arrays into Tickets +func (h *Handler) HandleAdapterIngest(w http.ResponseWriter, r *http.Request) { + adapterName := r.PathValue("name") + adapter, err := h.Store.GetAdapterByName(r.Context(), adapterName) + if err != nil { + http.Error(w, "Adapter not found", http.StatusNotFound) + return + } + + var rawData interface{} + if err := json.NewDecoder(r.Body).Decode(&rawData); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + findingsNode := getJSONValue(rawData, adapter.FindingsPath) + findingsArray, ok := findingsNode.([]interface{}) + if !ok { + http.Error(w, "Findings path did not resolve to a JSON array", http.StatusBadRequest) + return + } + + type groupKey struct { + Source string + Asset string + } + groupedTickets := make(map[groupKey][]domain2.Ticket) + + for _, item := range findingsArray { + finding, ok := 
item.(map[string]interface{}) + if !ok { + continue + } + + ticket := domain2.Ticket{ + Source: adapter.SourceName, + Status: "Waiting to be Triaged", // Explicitly set status + Title: interfaceToString(finding[adapter.MappingTitle]), + AssetIdentifier: interfaceToString(finding[adapter.MappingAsset]), + Severity: interfaceToString(finding[adapter.MappingSeverity]), + Description: interfaceToString(finding[adapter.MappingDescription]), + RecommendedRemediation: interfaceToString(finding[adapter.MappingRemediation]), + } + + if ticket.Title != "" && ticket.AssetIdentifier != "" { + hashInput := ticket.Source + "|" + ticket.AssetIdentifier + "|" + ticket.Title + hash := sha256.Sum256([]byte(hashInput)) + ticket.DedupeHash = hex.EncodeToString(hash[:]) + key := groupKey{Source: ticket.Source, Asset: ticket.AssetIdentifier} + groupedTickets[key] = append(groupedTickets[key], ticket) + } + } + + for key, batch := range groupedTickets { + err := h.Store.ProcessIngestionBatch(r.Context(), key.Source, key.Asset, batch) + if err != nil { + log.Printf("๐Ÿ”ฅ JSON Ingestion Error for Asset %s: %v", key.Asset, err) + // ๐Ÿš€ LOG THE BATCH FAILURE + h.Store.LogSync(r.Context(), key.Source, "Failed", len(batch), err.Error()) + http.Error(w, "Database error processing JSON batch", http.StatusInternalServerError) + return + } else { + // ๐Ÿš€ LOG THE SUCCESS + h.Store.LogSync(r.Context(), key.Source, "Success", len(batch), "") + } + } + + w.WriteHeader(http.StatusCreated) +} diff --git a/pkg/adapters/adapters_test.go b/pkg/adapters/adapters_test.go new file mode 100644 index 0000000..2351019 --- /dev/null +++ b/pkg/adapters/adapters_test.go @@ -0,0 +1,142 @@ +package adapters + +import ( + "bytes" + "context" + "database/sql" + "net/http" + "net/http/httptest" + "testing" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func setupTestAdapters(t *testing.T) (*Handler, *sql.DB) { + db := 
datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +func GetVIPCookie(store domain.Store) *http.Cookie { + user, err := store.GetUserByEmail(context.Background(), "vip@RiskRancher.com") + if err != nil { + user, _ = store.CreateUser(context.Background(), "vip@RiskRancher.com", "Test VIP", "hash", "Sheriff") + } + + store.CreateSession(context.Background(), "vip_token_999", user.ID, time.Now().Add(1*time.Hour)) + return &http.Cookie{Name: "session_token", Value: "vip_token_999"} +} + +func TestHandleAdapterIngest(t *testing.T) { + h, db := setupTestAdapters(t) + defer db.Close() + + adapterPayload := []byte(`{"name": "Trivy Test", "source_name": "TrivyScanner", "findings_path": "Results", "mapping_title": "VulnerabilityID", "mapping_asset": "Target", "mapping_severity": "Severity"}`) + reqAdapter := httptest.NewRequest(http.MethodPost, "/api/adapters", bytes.NewBuffer(adapterPayload)) + reqAdapter.AddCookie(GetVIPCookie(h.Store)) + reqAdapter.Header.Set("Content-Type", "application/json") + rrAdapter := httptest.NewRecorder() + + h.HandleCreateAdapter(rrAdapter, reqAdapter) + + payload := []byte(`{"SchemaVersion": 2, "Results": [{"VulnerabilityID": "CVE-1", "Target": "A", "Severity": "HIGH"}]}`) + req := httptest.NewRequest(http.MethodPost, "/api/ingest/Trivy%20Test", bytes.NewBuffer(payload)) + req.AddCookie(GetVIPCookie(h.Store)) + req.Header.Set("Content-Type", "application/json") + + req.SetPathValue("name", "Trivy Test") + rr := httptest.NewRecorder() + h.HandleAdapterIngest(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d", rr.Code) + } +} + +func TestGetAdapters(t *testing.T) { + h, db := setupTestAdapters(t) + defer db.Close() + + db.Exec(`INSERT INTO data_adapters (name, source_name, findings_path, mapping_title, mapping_asset, mapping_severity) VALUES ('Trivy Test', 'Trivy', 'Results', 'VulnerabilityID', 'PkgName', 'Severity')`) + + req := 
httptest.NewRequest(http.MethodGet, "/api/adapters", nil) + req.AddCookie(GetVIPCookie(h.Store)) + rr := httptest.NewRecorder() + h.HandleGetAdapters(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d", rr.Code) + } +} + +func TestCreateAdapter(t *testing.T) { + h, db := setupTestAdapters(t) + defer db.Close() + + payload := []byte(`{"name": "AcmeSec", "source_name": "Acme", "findings_path": "issues", "mapping_title": "t", "mapping_asset": "a", "mapping_severity": "s"}`) + req := httptest.NewRequest(http.MethodPost, "/api/adapters", bytes.NewBuffer(payload)) + req.AddCookie(GetVIPCookie(h.Store)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + h.HandleCreateAdapter(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d", rr.Code) + } +} + +func TestJSONIngestion(t *testing.T) { + h, db := setupTestAdapters(t) + defer db.Close() + + _, err := db.Exec(` + INSERT INTO data_adapters ( + id, name, source_name, findings_path, + mapping_title, mapping_asset, mapping_severity + ) VALUES ( + 998, 'NestedScanner', 'DeepScan', 'scan_data.results', + 'vuln_name', 'target_ip', 'risk_level' + ) + `) + if err != nil { + t.Fatalf("Failed to setup nested adapter: %v", err) + } + + payload := []byte(`{ + "metadata": { "version": "1.0" }, + "scan_data": { + "results": [ + { + "vuln_name": "Log4j RCE", + "target_ip": "10.0.0.5", + "risk_level": "Critical" + } + ] + } + }`) + + req := httptest.NewRequest(http.MethodPost, "/api/ingest/NestedScanner", bytes.NewBuffer(payload)) + req.Header.Set("Content-Type", "application/json") + req.AddCookie(GetVIPCookie(h.Store)) + + req.SetPathValue("name", "NestedScanner") + rr := httptest.NewRecorder() + h.HandleAdapterIngest(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d. 
Body: %s", rr.Code, rr.Body.String()) + } + + var title, severity string + err = db.QueryRow("SELECT title, severity FROM tickets WHERE source = 'DeepScan'").Scan(&title, &severity) + if err != nil { + t.Fatalf("Failed to query ingested ticket: %v", err) + } + + if title != "Log4j RCE" || severity != "Critical" { + t.Errorf("JSON Mapping failed! Expected 'Log4j RCE' / 'Critical', got '%s' / '%s'", title, severity) + } +} diff --git a/pkg/adapters/handler.go b/pkg/adapters/handler.go new file mode 100644 index 0000000..8097ec6 --- /dev/null +++ b/pkg/adapters/handler.go @@ -0,0 +1,13 @@ +package adapters + +import ( + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +type Handler struct { + Store domain.Store +} + +func NewHandler(store domain.Store) *Handler { + return &Handler{Store: store} +} diff --git a/pkg/admin/admin.go b/pkg/admin/admin.go new file mode 100644 index 0000000..2397af5 --- /dev/null +++ b/pkg/admin/admin.go @@ -0,0 +1,62 @@ +package admin + +import ( + "encoding/json" + "net/http" + "strconv" +) + +func (h *Handler) HandleGetConfig(w http.ResponseWriter, r *http.Request) { + config, err := h.Store.GetAppConfig(r.Context()) + if err != nil { + http.Error(w, "Failed to fetch configuration", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(config) +} + +func (h *Handler) HandleExportState(w http.ResponseWriter, r *http.Request) { + state, err := h.Store.ExportSystemState(r.Context()) + if err != nil { + http.Error(w, "Failed to generate system export", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Content-Disposition", "attachment; filename=RiskRancher_export.json") + w.WriteHeader(http.StatusOK) + + if err := json.NewEncoder(w).Encode(state); err != nil { + // Note: We can't change the HTTP status code here because we've already started streaming, + // but we can 
log the error if the stream breaks. + _ = err + } +} + +func (h *Handler) HandleGetLogs(w http.ResponseWriter, r *http.Request) { + filter := r.URL.Query().Get("filter") + page, err := strconv.Atoi(r.URL.Query().Get("page")) + if err != nil || page < 1 { + page = 1 + } + + limit := 15 + offset := (page - 1) * limit + + feed, total, err := h.Store.GetPaginatedActivityFeed(r.Context(), filter, limit, offset) + if err != nil { + http.Error(w, "Failed to load logs", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]any{ + "feed": feed, + "total": total, + "page": page, + "limit": limit, + }) +} diff --git a/pkg/admin/admin_handlers.go b/pkg/admin/admin_handlers.go new file mode 100644 index 0000000..de3f462 --- /dev/null +++ b/pkg/admin/admin_handlers.go @@ -0,0 +1,192 @@ +package admin + +import ( + "encoding/json" + "net/http" + "strconv" + "strings" + + "epigas.gitea.cloud/RiskRancher/core/pkg/auth" +) + +// PasswordResetRequest is the expected JSON payload +type PasswordResetRequest struct { + NewPassword string `json:"new_password"` +} + +// HandleAdminResetPassword allows a Sheriff to forcefully overwrite a user's password. 
+func (h *Handler) HandleAdminResetPassword(w http.ResponseWriter, r *http.Request) { + idStr := r.PathValue("id") + userID, err := strconv.Atoi(idStr) + if err != nil { + http.Error(w, "Invalid user ID in URL", http.StatusBadRequest) + return + } + + var req PasswordResetRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + if req.NewPassword == "" { + http.Error(w, "New password cannot be empty", http.StatusBadRequest) + return + } + + hashedPassword, err := auth.HashPassword(req.NewPassword) + if err != nil { + http.Error(w, "Internal server error during hashing", http.StatusInternalServerError) + return + } + + err = h.Store.UpdateUserPassword(r.Context(), userID, hashedPassword) + if err != nil { + http.Error(w, "Failed to update user password", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{ + "message": "Password reset successfully", + }) +} + +type RoleUpdateRequest struct { + GlobalRole string `json:"global_role"` +} + +// HandleUpdateUserRole allows a Sheriff to promote or demote a user. 
+func (h *Handler) HandleUpdateUserRole(w http.ResponseWriter, r *http.Request) { + idStr := r.PathValue("id") + userID, err := strconv.Atoi(idStr) + if err != nil { + http.Error(w, "Invalid user ID in URL", http.StatusBadRequest) + return + } + var req RoleUpdateRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + validRoles := map[string]bool{ + "Sheriff": true, "Wrangler": true, "RangeHand": true, "CircuitRider": true, "Magistrate": true, + } + if !validRoles[req.GlobalRole] { + http.Error(w, "Invalid role provided", http.StatusBadRequest) + return + } + + err = h.Store.UpdateUserRole(r.Context(), userID, req.GlobalRole) + if err != nil { + http.Error(w, "Failed to update user role", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{ + "message": "User role updated successfully to " + req.GlobalRole, + }) +} + +// HandleDeactivateUser allows a Sheriff to safely offboard a user. +func (h *Handler) HandleDeactivateUser(w http.ResponseWriter, r *http.Request) { + idStr := r.PathValue("id") + userID, err := strconv.Atoi(idStr) + if err != nil { + http.Error(w, "Invalid user ID in URL", http.StatusBadRequest) + return + } + + err = h.Store.DeactivateUserAndReassign(r.Context(), userID) + if err != nil { + http.Error(w, "Failed to deactivate user", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{ + "message": "User successfully deactivated and tickets reassigned.", + }) +} + +// CreateUserRequest is the payload the Sheriff sends to invite a new user +type CreateUserRequest struct { + Email string `json:"email"` + FullName string `json:"full_name"` + Password string `json:"password"` + GlobalRole string `json:"global_role"` +} + +// HandleCreateUser allows a Sheriff to manually provision a new user account. 
+func (h *Handler) HandleCreateUser(w http.ResponseWriter, r *http.Request) { + var req CreateUserRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + if req.Email == "" || req.FullName == "" || req.Password == "" || req.GlobalRole == "" { + http.Error(w, "Missing required fields", http.StatusBadRequest) + return + } + + validRoles := map[string]bool{ + "Sheriff": true, "Wrangler": true, "RangeHand": true, "CircuitRider": true, "Magistrate": true, + } + if !validRoles[req.GlobalRole] { + http.Error(w, "Invalid role provided", http.StatusBadRequest) + return + } + + hashedPassword, err := auth.HashPassword(req.Password) + if err != nil { + http.Error(w, "Internal server error during hashing", http.StatusInternalServerError) + return + } + + user, err := h.Store.CreateUser(r.Context(), req.Email, req.FullName, hashedPassword, req.GlobalRole) + if err != nil { + if strings.Contains(err.Error(), "UNIQUE constraint failed") { + http.Error(w, "Email already exists in the system", http.StatusConflict) + return + } + http.Error(w, "Failed to provision user", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "User provisioned successfully. Share the temporary password securely.", + "id": user.ID, + "email": user.Email, + "full_name": user.FullName, + "global_role": user.GlobalRole, + }) +} + +// HandleGetUsers returns a list of all users in the system for the Sheriff to manage. 
+func (h *Handler) HandleGetUsers(w http.ResponseWriter, r *http.Request) { + users, err := h.Store.GetAllUsers(r.Context()) + if err != nil { + http.Error(w, "Failed to fetch user roster", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(users) +} + +// HandleGetWranglers returns a clean list of IT users for assignment dropdowns +func (h *Handler) HandleGetWranglers(w http.ResponseWriter, r *http.Request) { + wranglers, err := h.Store.GetWranglers(r.Context()) + if err != nil { + http.Error(w, "Failed to fetch wranglers", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(wranglers) +} diff --git a/pkg/admin/admin_lifecycle.go b/pkg/admin/admin_lifecycle.go new file mode 100644 index 0000000..06786cb --- /dev/null +++ b/pkg/admin/admin_lifecycle.go @@ -0,0 +1,69 @@ +package admin + +import ( + "encoding/json" + "net/http" + "os" + "time" +) + +const CurrentAppVersion = "v1.0.0" + +type UpdateCheckResponse struct { + Status string `json:"status"` + CurrentVersion string `json:"current_version"` + LatestVersion string `json:"latest_version,omitempty"` + UpdateAvailable bool `json:"update_available"` + Message string `json:"message"` +} + +// HandleCheckUpdates pings gitea. If air-gapped, it returns manual instructions. +func (h *Handler) HandleCheckUpdates(w http.ResponseWriter, r *http.Request) { + respPayload := UpdateCheckResponse{ + CurrentVersion: CurrentAppVersion, + } + + client := http.Client{Timeout: 3 * time.Second} + + giteaURL := "https://epigas.gitea.cloud/api/v1/repos/RiskRancher/core/releases/latest" + resp, err := client.Get(giteaURL) + + if err != nil || resp.StatusCode != http.StatusOK { + respPayload.Status = "offline" + respPayload.Message = "No internet connection detected. 
To update an air-gapped server: Download the latest RiskRancher binary on a connected machine, transfer it via rsync or scp to this server, and restart the service." + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(respPayload) + return + } + defer resp.Body.Close() + + var ghRelease struct { + TagName string `json:"tag_name"` + } + if err := json.NewDecoder(resp.Body).Decode(&ghRelease); err == nil { + respPayload.Status = "online" + respPayload.LatestVersion = ghRelease.TagName + respPayload.UpdateAvailable = (ghRelease.TagName != CurrentAppVersion) + + if respPayload.UpdateAvailable { + respPayload.Message = "A new version is available! Please trigger a graceful shutdown and swap the binary." + } else { + respPayload.Message = "You are running the latest version." + } + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(respPayload) +} + +// HandleShutdown signals the application to close connections and exit cleanly +func (h *Handler) HandleShutdown(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"message": "Initiating graceful shutdown. 
The server will exit in 2 seconds..."}`)) + go func() { + time.Sleep(2 * time.Second) + }() +} diff --git a/pkg/admin/admin_test.go b/pkg/admin/admin_test.go new file mode 100644 index 0000000..4cd7337 --- /dev/null +++ b/pkg/admin/admin_test.go @@ -0,0 +1,64 @@ +package admin + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func TestGetGlobalConfig(t *testing.T) { + app, db := setupTestAdmin(t) + defer db.Close() + + req := httptest.NewRequest(http.MethodGet, "/api/config", nil) + req.AddCookie(GetVIPCookie(app.Store)) + rr := httptest.NewRecorder() + + app.HandleGetConfig(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String()) + } + + var config domain.AppConfig + if err := json.NewDecoder(rr.Body).Decode(&config); err != nil { + t.Fatalf("Failed to decode response: %v", err) + } + + if config.Timezone != "America/New_York" || config.BusinessStart != 9 { + t.Errorf("Expected default config, got TZ: %s, Start: %d", config.Timezone, config.BusinessStart) + } +} + +func TestHandleDeactivateUser(t *testing.T) { + h, db := setupTestAdmin(t) + defer db.Close() + + targetUser, _ := h.Store.CreateUser(context.Background(), "fired@ranch.com", "Fired Fred", "hash", "RangeHand") + res, _ := db.Exec(`INSERT INTO tickets (title, status, severity, source, dedupe_hash) VALUES ('Freds Task', 'Waiting to be Triaged', 'High', 'Manual', 'fake-hash-123')`) + ticketID, _ := res.LastInsertId() + db.Exec(`INSERT INTO ticket_assignments (ticket_id, assignee, role) VALUES (?, 'fired@ranch.com', 'RangeHand')`, ticketID) + + targetURL := fmt.Sprintf("/api/admin/users/%d", targetUser.ID) + req := httptest.NewRequest(http.MethodDelete, targetURL, nil) + req.AddCookie(GetVIPCookie(h.Store)) + req.SetPathValue("id", fmt.Sprintf("%d", targetUser.ID)) + rr := httptest.NewRecorder() + + h.HandleDeactivateUser(rr, req) + + if 
rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String()) + } + + var count int + db.QueryRow(`SELECT COUNT(*) FROM ticket_assignments WHERE assignee = 'fired@ranch.com'`).Scan(&count) + if count != 0 { + t.Errorf("Expected assignments to be cleared, but found %d", count) + } +} diff --git a/pkg/admin/admin_users_test.go b/pkg/admin/admin_users_test.go new file mode 100644 index 0000000..8068640 --- /dev/null +++ b/pkg/admin/admin_users_test.go @@ -0,0 +1,106 @@ +package admin + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" +) + +func TestHandleAdminResetPassword(t *testing.T) { + a, db := setupTestAdmin(t) + defer db.Close() + + targetUser, _ := a.Store.CreateUser(context.Background(), "forgetful@ranch.com", "Forgetful Fred", "old_hash", "RangeHand") + + payload := map[string]string{ + "new_password": "BrandNewSecurePassword123!", + } + body, _ := json.Marshal(payload) + + targetURL := fmt.Sprintf("/api/admin/users/%d/reset-password", targetUser.ID) + req := httptest.NewRequest(http.MethodPatch, targetURL, bytes.NewBuffer(body)) + + req.SetPathValue("id", fmt.Sprintf("%d", targetUser.ID)) + req.Header.Set("Content-Type", "application/json") + + rr := httptest.NewRecorder() + a.HandleAdminResetPassword(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. 
Body: %s", rr.Code, rr.Body.String()) + } +} + +func TestHandleUpdateUserRole(t *testing.T) { + a, db := setupTestAdmin(t) + defer db.Close() + + _, _ = a.Store.CreateUser(context.Background(), "boss@ranch.com", "The Boss", "hash", "Sheriff") + targetUser, _ := a.Store.CreateUser(context.Background(), "rookie@ranch.com", "Rookie Ray", "hash", "RangeHand") + + payload := map[string]string{ + "global_role": "Wrangler", + } + body, _ := json.Marshal(payload) + + targetURL := fmt.Sprintf("/api/admin/users/%d/role", targetUser.ID) + req := httptest.NewRequest(http.MethodPatch, targetURL, bytes.NewBuffer(body)) + + req.AddCookie(GetVIPCookie(a.Store)) + req.SetPathValue("id", fmt.Sprintf("%d", targetUser.ID)) + req.Header.Set("Content-Type", "application/json") + + rr := httptest.NewRecorder() + a.HandleUpdateUserRole(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String()) + } +} + +func TestHandleCreateUser_SheriffInvite(t *testing.T) { + a, db := setupTestAdmin(t) + defer db.Close() + + payload := map[string]string{ + "email": "magistrate@ranch.com", + "full_name": "Mighty Magistrate", + "password": "TempPassword123!", + "global_role": "Magistrate", + } + body, _ := json.Marshal(payload) + req := httptest.NewRequest(http.MethodPost, "/api/admin/users", bytes.NewBuffer(body)) + + req.AddCookie(GetVIPCookie(a.Store)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + a.HandleCreateUser(rr, req) + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d. 
Body: %s", rr.Code, rr.Body.String()) + } + + var count int + db.QueryRow(`SELECT COUNT(*) FROM users WHERE email = 'magistrate@ranch.com'`).Scan(&count) + if count != 1 { + t.Errorf("Expected user to be created in the database") + } +} + +func TestHandleGetUsers(t *testing.T) { + a, db := setupTestAdmin(t) + defer db.Close() + + req := httptest.NewRequest(http.MethodGet, "/api/admin/users", nil) + + req.AddCookie(GetVIPCookie(a.Store)) + + rr := httptest.NewRecorder() + a.HandleGetUsers(rr, req) + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String()) + } +} diff --git a/pkg/admin/export_test.go b/pkg/admin/export_test.go new file mode 100644 index 0000000..c4b38ad --- /dev/null +++ b/pkg/admin/export_test.go @@ -0,0 +1,44 @@ +package admin + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func TestExportSystemState(t *testing.T) { + app, db := setupTestAdmin(t) + defer db.Close() + _, err := db.Exec(` + INSERT INTO tickets (title, severity, status, dedupe_hash) + VALUES ('Export Test Vuln', 'High', 'Triaged', 'test_hash_123') + `) + if err != nil { + t.Fatalf("Failed to insert test ticket: %v", err) + } + req := httptest.NewRequest(http.MethodGet, "/api/admin/export", nil) + req.AddCookie(GetVIPCookie(app.Store)) + rr := httptest.NewRecorder() + + app.HandleExportState(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d", rr.Code) + } + + if rr.Header().Get("Content-Disposition") != "attachment; filename=RiskRancher_export.json" { + t.Errorf("Missing or incorrect Content-Disposition header") + } + + var state domain.ExportState + if err := json.NewDecoder(rr.Body).Decode(&state); err != nil { + t.Fatalf("Failed to decode exported JSON: %v", err) + } + + if len(state.Tickets) == 0 || state.Tickets[0].Title != "Export Test Vuln" { + t.Errorf("Export did not contain the expected ticket data") + } +} diff 
--git a/pkg/admin/handler.go b/pkg/admin/handler.go new file mode 100644 index 0000000..d31e6bd --- /dev/null +++ b/pkg/admin/handler.go @@ -0,0 +1,15 @@ +package admin + +import ( + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +// Handler encapsulates all Admin and Sheriff HTTP logic +type Handler struct { + Store domain.Store +} + +// NewHandler creates a new Admin Handler +func NewHandler(store domain.Store) *Handler { + return &Handler{Store: store} +} diff --git a/pkg/admin/helpers_test.go b/pkg/admin/helpers_test.go new file mode 100644 index 0000000..4863ce8 --- /dev/null +++ b/pkg/admin/helpers_test.go @@ -0,0 +1,30 @@ +package admin + +import ( + "context" + "database/sql" + "net/http" + "testing" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +// setupTestAdmin returns the clean Admin Handler and the raw DB +func setupTestAdmin(t *testing.T) (*Handler, *sql.DB) { + db := datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +// GetVIPCookie creates a dummy Sheriff user to bypass the Bouncer in tests +func GetVIPCookie(store domain.Store) *http.Cookie { + user, err := store.GetUserByEmail(context.Background(), "vip_test@RiskRancher.com") + if err != nil { + user, _ = store.CreateUser(context.Background(), "vip_test@RiskRancher.com", "Test VIP", "hash", "Sheriff") + } + token := "vip_test_token_999" + store.CreateSession(context.Background(), token, user.ID, time.Now().Add(1*time.Hour)) + return &http.Cookie{Name: "session_token", Value: token} +} diff --git a/pkg/admin/updates_test.go b/pkg/admin/updates_test.go new file mode 100644 index 0000000..2e2e425 --- /dev/null +++ b/pkg/admin/updates_test.go @@ -0,0 +1,36 @@ +package admin + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" +) + +func TestCheckUpdates_OfflineFallback(t *testing.T) { + + app, db := setupTestAdmin(t) + defer db.Close() + + req 
:= httptest.NewRequest(http.MethodGet, "/api/admin/check-updates", nil) + req.AddCookie(GetVIPCookie(app.Store)) + rr := httptest.NewRecorder() + + app.HandleCheckUpdates(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d", rr.Code) + } + + var response map[string]interface{} + if err := json.NewDecoder(rr.Body).Decode(&response); err != nil { + t.Fatalf("Failed to decode response: %v", err) + } + + if _, exists := response["status"]; !exists { + t.Errorf("Expected 'status' field in response") + } + if _, exists := response["message"]; !exists { + t.Errorf("Expected 'message' field in response") + } +} diff --git a/pkg/analytics/analytics.go b/pkg/analytics/analytics.go new file mode 100644 index 0000000..014a897 --- /dev/null +++ b/pkg/analytics/analytics.go @@ -0,0 +1,17 @@ +package analytics + +import ( + "encoding/json" + "net/http" +) + +func (h *Handler) HandleGetAnalyticsSummary(w http.ResponseWriter, r *http.Request) { + summary, err := h.Store.GetAnalyticsSummary(r.Context()) + if err != nil { + http.Error(w, "Failed to generate analytics", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(summary) +} diff --git a/pkg/analytics/analytics_test.go b/pkg/analytics/analytics_test.go new file mode 100644 index 0000000..ec2b89f --- /dev/null +++ b/pkg/analytics/analytics_test.go @@ -0,0 +1,60 @@ +package analytics + +import ( + "context" + "database/sql" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func setupTestAnalytics(t *testing.T) (*Handler, *sql.DB) { + db := datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +func GetVIPCookie(store domain.Store) *http.Cookie { + user, _ := store.CreateUser(context.Background(), "vip@RiskRancher.com", "Test VIP", "hash", 
"Sheriff")
+	store.CreateSession(context.Background(), "vip_token_999", user.ID, time.Now().Add(1*time.Hour))
+	return &http.Cookie{Name: "session_token", Value: "vip_token_999"}
+}
+
+func TestAnalyticsSummary(t *testing.T) {
+	h, db := setupTestAnalytics(t)
+	defer db.Close()
+
+	_, err := db.Exec(`INSERT INTO tickets (source, title, severity, status, dedupe_hash) VALUES
+		('Trivy', 'Container CVE', 'Critical', 'Waiting to be Triaged', 'hash1'),
+		('Trivy', 'Old Lib', 'High', 'Waiting to be Triaged', 'hash2'),
+		('Trivy', 'Patched Lib', 'Critical', 'Patched', 'hash3'),
+		('Manual Pentest', 'SQLi', 'Critical', 'Waiting to be Triaged', 'hash4')
+	`)
+	if err != nil {
+		t.Fatalf("Failed to insert dummy data: %v", err)
+	}
+
+	req := httptest.NewRequest(http.MethodGet, "/api/analytics/summary", nil)
+	req.AddCookie(GetVIPCookie(h.Store))
+	rr := httptest.NewRecorder()
+
+	h.HandleGetAnalyticsSummary(rr, req)
+
+	if rr.Code != http.StatusOK {
+		t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String())
+	}
+
+	var summary map[string]int
+	if err := json.NewDecoder(rr.Body).Decode(&summary); err != nil {
+		t.Fatalf("Failed to decode JSON: %v", err)
+	}
+
+	if summary["Total_Open"] != 3 {
+		t.Errorf("Expected 3 total open tickets, got %d", summary["Total_Open"])
+	}
+}
diff --git a/pkg/analytics/handler.go b/pkg/analytics/handler.go
new file mode 100644
index 0000000..e4e1e17
--- /dev/null
+++ b/pkg/analytics/handler.go
@@ -0,0 +1,13 @@
+package analytics
+
+import (
+	"epigas.gitea.cloud/RiskRancher/core/pkg/domain"
+)
+
+type Handler struct {
+	Store domain.Store
+}
+
+func NewHandler(store domain.Store) *Handler {
+	return &Handler{Store: store}
+}
diff --git a/pkg/auth/auth.go b/pkg/auth/auth.go
new file mode 100644
index 0000000..411f1fc
--- /dev/null
+++ b/pkg/auth/auth.go
@@ -0,0 +1,42 @@
+package auth
+
+import (
+	"crypto/rand"
+	"encoding/base64"
+
+	"epigas.gitea.cloud/RiskRancher/core/pkg/domain"
+	"golang.org/x/crypto/bcrypt"
+)
+
+// Handler encapsulates all Identity and Access HTTP logic
+type Handler struct {
+	Store domain.Store
+}
+
+// NewHandler creates a new Auth Handler
+func NewHandler(store domain.Store) *Handler {
+	return &Handler{Store: store}
+}
+
+// HashPassword takes a plaintext password, automatically generates a secure salt
+func HashPassword(password string) (string, error) {
+	bytes, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
+	return string(bytes), err
+}
+
+// CheckPasswordHash securely compares a plaintext password with a stored bcrypt hash.
+func CheckPasswordHash(password, hash string) bool {
+	err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
+	return err == nil
+}
+
+// GenerateSessionToken creates a cryptographically secure random string
+func GenerateSessionToken() (string, error) {
+	b := make([]byte, 32)
+	// crypto/rand, NOT math/rand: session tokens must be unpredictable.
+	_, err := rand.Read(b)
+	if err != nil {
+		return "", err
+	}
+	return base64.URLEncoding.EncodeToString(b), nil
+}
diff --git a/pkg/auth/auth_handlers.go b/pkg/auth/auth_handlers.go
new file mode 100644
index 0000000..2d7dd92
--- /dev/null
+++ b/pkg/auth/auth_handlers.go
@@ -0,0 +1,140 @@
+package auth
+
+import (
+	"encoding/json"
+	"net/http"
+	"strings"
+	"time"
+)
+
+const SessionCookieName = "session_token"
+
+// RegisterRequest represents the JSON payload expected for user registration.
+type RegisterRequest struct {
+	Email      string `json:"email"`
+	FullName   string `json:"full_name"`
+	Password   string `json:"password"`
+	GlobalRole string `json:"global_role"`
+}
+
+// LoginRequest represents the JSON payload expected for user login.
+type LoginRequest struct {
+	Email    string `json:"email"`
+	Password string `json:"password"`
+}
+
+// HandleRegister processes new user signups.
+func (h *Handler) HandleRegister(w http.ResponseWriter, r *http.Request) { + var req RegisterRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + count, err := h.Store.GetUserCount(r.Context()) + if err != nil { + http.Error(w, "Internal server error", http.StatusInternalServerError) + return + } + + if count > 0 { + http.Error(w, "Forbidden: System already initialized. Contact your Sheriff for an account.", http.StatusForbidden) + return + } + + req.GlobalRole = "Sheriff" + + if req.Email == "" || req.Password == "" || req.FullName == "" { + http.Error(w, "Missing required fields", http.StatusBadRequest) + return + } + + hashedPassword, err := HashPassword(req.Password) + if err != nil { + http.Error(w, "Failed to hash password", http.StatusInternalServerError) + return + } + + user, err := h.Store.CreateUser(r.Context(), req.Email, req.FullName, hashedPassword, req.GlobalRole) + if err != nil { + if strings.Contains(err.Error(), "UNIQUE constraint failed") { + http.Error(w, "Email already exists", http.StatusConflict) + return + } + http.Error(w, "Failed to create user", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(user) +} + +// HandleLogin authenticates a user and issues a session cookie. 
+func (h *Handler) HandleLogin(w http.ResponseWriter, r *http.Request) { + var req LoginRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + user, err := h.Store.GetUserByEmail(r.Context(), req.Email) + if err != nil { + http.Error(w, "Invalid credentials", http.StatusUnauthorized) + return + } + + if !CheckPasswordHash(req.Password, user.PasswordHash) { + http.Error(w, "Invalid credentials", http.StatusUnauthorized) + return + } + + token, err := GenerateSessionToken() + if err != nil { + http.Error(w, "Failed to generate session", http.StatusInternalServerError) + return + } + + expiresAt := time.Now().Add(24 * time.Hour) + if err := h.Store.CreateSession(r.Context(), token, user.ID, expiresAt); err != nil { + http.Error(w, "Failed to persist session", http.StatusInternalServerError) + return + } + + http.SetCookie(w, &http.Cookie{ + Name: "session_token", + Value: token, + Expires: expiresAt, + Path: "/", + HttpOnly: true, + Secure: false, // Set to TRUE in production for HTTPS! + SameSite: http.SameSiteLaxMode, + }) + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(user) +} + +// HandleLogout destroys the user's session in the database and clears their cookie. 
+func (h *Handler) HandleLogout(w http.ResponseWriter, r *http.Request) { + cookie, err := r.Cookie(SessionCookieName) + + if err == nil && cookie.Value != "" { + _ = h.Store.DeleteSession(r.Context(), cookie.Value) + } + + http.SetCookie(w, &http.Cookie{ + Name: SessionCookieName, + Value: "", + Path: "/", + Expires: time.Unix(0, 0), + MaxAge: -1, + HttpOnly: true, + Secure: true, // Ensures it's only sent over HTTPS + SameSite: http.SameSiteStrictMode, + }) + + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{ + "message": "Successfully logged out", + }) +} diff --git a/pkg/auth/auth_handlers_test.go b/pkg/auth/auth_handlers_test.go new file mode 100644 index 0000000..c274f0a --- /dev/null +++ b/pkg/auth/auth_handlers_test.go @@ -0,0 +1,111 @@ +package auth + +import ( + "bytes" + "database/sql" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" +) + +func setupTestAuth(t *testing.T) (*Handler, *sql.DB) { + db := datastore.InitDB(":memory:") + + store := datastore.NewSQLiteStore(db) + + h := NewHandler(store) + + return h, db +} + +func TestAuthHandlers(t *testing.T) { + a, db := setupTestAuth(t) + defer db.Close() + + t.Run("Successful Registration", func(t *testing.T) { + payload := map[string]string{ + "email": "admin@RiskRancher.com", + "full_name": "Doc Holliday", + "password": "SuperSecretPassword123!", + "global_role": "Sheriff", // Use a valid role! 
+ } + body, _ := json.Marshal(payload) + + req := httptest.NewRequest(http.MethodPost, "/api/auth/register", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + a.HandleRegister(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created for registration, got %d", rr.Code) + } + }) + + t.Run("Successful Login Issues Cookie", func(t *testing.T) { + payload := map[string]string{ + "email": "admin@RiskRancher.com", + "password": "SuperSecretPassword123!", + } + body, _ := json.Marshal(payload) + + req := httptest.NewRequest(http.MethodPost, "/api/auth/login", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + a.HandleLogin(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK for successful login, got %d", rr.Code) + } + + cookies := rr.Result().Cookies() + if len(cookies) == 0 { + t.Fatalf("Expected a session cookie to be set, but none was found") + } + if cookies[0].Name != "session_token" { + t.Errorf("Expected cookie named 'session_token', got '%s'", cookies[0].Name) + } + }) + + t.Run("Failed Login Rejects Access", func(t *testing.T) { + payload := map[string]string{ + "email": "admin@RiskRancher.com", + "password": "WrongPassword!", + } + body, _ := json.Marshal(payload) + + req := httptest.NewRequest(http.MethodPost, "/api/auth/login", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + a.HandleLogin(rr, req) + + if rr.Code != http.StatusUnauthorized { + t.Fatalf("Expected 401 Unauthorized for wrong password, got %d", rr.Code) + } + }) +} + +func TestHandleLogout(t *testing.T) { + a, db := setupTestAuth(t) + defer db.Close() + + req := httptest.NewRequest(http.MethodPost, "/api/auth/logout", nil) + + cookie := &http.Cookie{ + Name: SessionCookieName, + Value: "fake-session-token-123", + } + req.AddCookie(cookie) + + rr := httptest.NewRecorder() + 
a.HandleLogout(rr, req) + + if rr.Code != http.StatusOK { + t.Fatalf("Expected 200 OK, got %d. Body: %s", rr.Code, rr.Body.String()) + } +} diff --git a/pkg/auth/auth_test.go b/pkg/auth/auth_test.go new file mode 100644 index 0000000..5892a16 --- /dev/null +++ b/pkg/auth/auth_test.go @@ -0,0 +1,49 @@ +package auth + +import ( + "testing" +) + +func TestPasswordHashing(t *testing.T) { + password := "SuperSecretSOCPassword123!" + + hash, err := HashPassword(password) + if err != nil { + t.Fatalf("Failed to hash password: %v", err) + } + + if hash == password { + t.Fatalf("Security failure: Hash matches plain text!") + } + if len(hash) == 0 { + t.Fatalf("Hash is empty") + } + + isValid := CheckPasswordHash(password, hash) + if !isValid { + t.Errorf("Expected valid password to match hash, but it failed") + } + + isInvalid := CheckPasswordHash("WrongPassword!", hash) + if isInvalid { + t.Errorf("Security failure: Incorrect password returned true!") + } +} + +func TestGenerateSessionToken(t *testing.T) { + + token1, err1 := GenerateSessionToken() + token2, err2 := GenerateSessionToken() + + if err1 != nil || err2 != nil { + t.Fatalf("Failed to generate session tokens") + } + + if len(token1) < 32 { + t.Errorf("Token is too short for security standards: %d chars", len(token1)) + } + + if token1 == token2 { + t.Errorf("CRITICAL: RNG generated the exact same token twice: %s", token1) + } +} diff --git a/pkg/auth/middleware.go b/pkg/auth/middleware.go new file mode 100644 index 0000000..762acc8 --- /dev/null +++ b/pkg/auth/middleware.go @@ -0,0 +1,56 @@ +package auth + +import ( + "context" + "net/http" + "time" +) + +type contextKey string + +const UserIDKey contextKey = "user_id" + +func (h *Handler) RequireAuth(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + cookie, err := r.Cookie("session_token") + if err != nil { + http.Error(w, "Unauthorized: Missing session cookie", http.StatusUnauthorized) + return + } + + 
session, err := h.Store.GetSession(r.Context(), cookie.Value) + if err != nil { + http.Error(w, "Unauthorized: Invalid session", http.StatusUnauthorized) + return + } + + if session.ExpiresAt.Before(time.Now()) { + http.Error(w, "Unauthorized: Session expired", http.StatusUnauthorized) + return + } + + ctx := context.WithValue(r.Context(), UserIDKey, session.UserID) + + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// RequireUIAuth checks for a valid session and redirects to /login if it fails, +func (h *Handler) RequireUIAuth(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + cookie, err := r.Cookie("session_token") + if err != nil { + http.Redirect(w, r, "/login", http.StatusSeeOther) + return + } + + session, err := h.Store.GetSession(r.Context(), cookie.Value) + if err != nil || session.ExpiresAt.Before(time.Now()) { + http.Redirect(w, r, "/login", http.StatusSeeOther) + return + } + + ctx := context.WithValue(r.Context(), UserIDKey, session.UserID) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} diff --git a/pkg/auth/middleware_test.go b/pkg/auth/middleware_test.go new file mode 100644 index 0000000..029d2b2 --- /dev/null +++ b/pkg/auth/middleware_test.go @@ -0,0 +1,61 @@ +package auth + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" +) + +func TestRequireAuthMiddleware(t *testing.T) { + h, db := setupTestAuth(t) + defer db.Close() + + user, err := h.Store.CreateUser(context.Background(), "vip@RiskRancher.com", "Wyatt Earp", "fake_hash", "Sheriff") + if err != nil { + t.Fatalf("Failed to seed test user: %v", err) + } + + validToken := "valid_test_token_123" + expiresAt := time.Now().Add(1 * time.Hour) + err = h.Store.CreateSession(context.Background(), validToken, user.ID, expiresAt) + if err != nil { + t.Fatalf("Failed to seed test session: %v", err) + } + + dummyHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + 
w.Write([]byte("Welcome to the VIP room")) + }) + protectedHandler := h.RequireAuth(dummyHandler) + + tests := []struct { + name string + cookieName string + cookieValue string + expectedStatus int + }{ + {"Missing Cookie", "", "", http.StatusUnauthorized}, + {"Wrong Cookie Name", "wrong_name", validToken, http.StatusUnauthorized}, + {"Invalid Token", "session_token", "fake_invalid_token", http.StatusUnauthorized}, + {"Valid Token", "session_token", validToken, http.StatusOK}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/", nil) + + if tt.cookieName != "" { + req.AddCookie(&http.Cookie{Name: tt.cookieName, Value: tt.cookieValue}) + } + + rr := httptest.NewRecorder() + protectedHandler.ServeHTTP(rr, req) + + if rr.Code != tt.expectedStatus { + t.Errorf("Expected status %d, got %d", tt.expectedStatus, rr.Code) + } + }) + } +} diff --git a/pkg/auth/rbac_middleware.go b/pkg/auth/rbac_middleware.go new file mode 100644 index 0000000..9e335ee --- /dev/null +++ b/pkg/auth/rbac_middleware.go @@ -0,0 +1,74 @@ +package auth + +import ( + "net/http" +) + +// RequireRole acts as the checker +func (h *Handler) RequireRole(requiredRole string) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + userIDVal := r.Context().Value(UserIDKey) + if userIDVal == nil { + http.Error(w, "Unauthorized: No user context", http.StatusUnauthorized) + return + } + + userID, ok := userIDVal.(int) + if !ok { + http.Error(w, "Internal Server Error: Invalid user context", http.StatusInternalServerError) + return + } + + user, err := h.Store.GetUserByID(r.Context(), userID) + if err != nil { + http.Error(w, "Forbidden: User not found", http.StatusForbidden) + return + } + + if user.GlobalRole != requiredRole { + http.Error(w, "Forbidden: Insufficient permissions", http.StatusForbidden) + return + } + + next.ServeHTTP(w, r) 
+ }) + } +} + +// RequireAnyRole allows access if the user has ANY of the provided roles. +func (h *Handler) RequireAnyRole(allowedRoles ...string) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + userIDVal := r.Context().Value(UserIDKey) + if userIDVal == nil { + http.Error(w, "Unauthorized: No user context", http.StatusUnauthorized) + return + } + + userID, ok := userIDVal.(int) + if !ok { + http.Error(w, "Internal Server Error: Invalid user context", http.StatusInternalServerError) + return + } + + user, err := h.Store.GetUserByID(r.Context(), userID) + if err != nil { + http.Error(w, "Forbidden: User not found", http.StatusForbidden) + return + } + + for _, role := range allowedRoles { + if user.GlobalRole == role { + // Match found! Open the door. + next.ServeHTTP(w, r) + return + } + } + + http.Error(w, "Forbidden: Insufficient permissions", http.StatusForbidden) + }) + } +} diff --git a/pkg/auth/rbac_middleware_test.go b/pkg/auth/rbac_middleware_test.go new file mode 100644 index 0000000..3d28eff --- /dev/null +++ b/pkg/auth/rbac_middleware_test.go @@ -0,0 +1,49 @@ +package auth + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" +) + +func TestRequireRoleMiddleware(t *testing.T) { + a, db := setupTestAuth(t) + defer db.Close() + + sheriff, _ := a.Store.CreateUser(context.Background(), "sheriff@ranch.com", "Wyatt Earp", "hash", "Sheriff") + rangeHand, _ := a.Store.CreateUser(context.Background(), "hand@ranch.com", "Jesse James", "hash", "RangeHand") + + vipHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("Welcome to the Manager's Office")) + }) + + protectedHandler := a.RequireRole("Sheriff")(vipHandler) + + tests := []struct { + name string + userID int + expectedStatus int + }{ + {"Valid Sheriff Access", sheriff.ID, http.StatusOK}, + {"Denied RangeHand 
Access", rangeHand.ID, http.StatusForbidden}, + {"Unknown User", 9999, http.StatusForbidden}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := httptest.NewRequest(http.MethodGet, "/admin/passwords", nil) + + ctx := context.WithValue(req.Context(), UserIDKey, tt.userID) + req = req.WithContext(ctx) + + rr := httptest.NewRecorder() + protectedHandler.ServeHTTP(rr, req) + + if rr.Code != tt.expectedStatus { + t.Errorf("Expected status %d, got %d", tt.expectedStatus, rr.Code) + } + }) + } +} diff --git a/pkg/datastore/auth_db.go b/pkg/datastore/auth_db.go new file mode 100644 index 0000000..0015079 --- /dev/null +++ b/pkg/datastore/auth_db.go @@ -0,0 +1,187 @@ +package datastore + +import ( + "context" + "database/sql" + "errors" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +// ErrNotFound is a standard error we can use across our handlers +var ErrNotFound = errors.New("record not found") + +func (s *SQLiteStore) CreateUser(ctx context.Context, email, fullName, passwordHash, globalRole string) (*domain.User, error) { + query := `INSERT INTO users (email, full_name, password_hash, global_role) VALUES (?, ?, ?, ?)` + + result, err := s.DB.ExecContext(ctx, query, email, fullName, passwordHash, globalRole) + if err != nil { + return nil, err + } + + id, err := result.LastInsertId() + if err != nil { + return nil, err + } + + return &domain.User{ + ID: int(id), + Email: email, + FullName: fullName, + PasswordHash: passwordHash, + GlobalRole: globalRole, + }, nil +} + +func (s *SQLiteStore) GetUserByEmail(ctx context.Context, email string) (*domain.User, error) { + var user domain.User + query := "SELECT id, email, password_hash, full_name, global_role FROM users WHERE email = ? 
AND is_active = 1" + + err := s.DB.QueryRowContext(ctx, query, email).Scan( + &user.ID, + &user.Email, + &user.PasswordHash, + &user.FullName, + &user.GlobalRole, + ) + + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows // Bouncer says no (either wrong email, or deactivated) + } + return nil, err + } + + return &user, nil +} + +func (s *SQLiteStore) CreateSession(ctx context.Context, token string, userID int, expiresAt time.Time) error { + query := `INSERT INTO sessions (session_token, user_id, expires_at) VALUES (?, ?, ?)` + _, err := s.DB.ExecContext(ctx, query, token, userID, expiresAt) + return err +} + +func (s *SQLiteStore) GetSession(ctx context.Context, token string) (*domain.Session, error) { + query := `SELECT session_token, user_id, expires_at FROM sessions WHERE session_token = ?` + + var session domain.Session + err := s.DB.QueryRowContext(ctx, query, token).Scan( + &session.Token, + &session.UserID, + &session.ExpiresAt, + ) + + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ErrNotFound + } + return nil, err + } + + return &session, nil +} + +// GetUserByID fetches a user's full record, including their role +func (s *SQLiteStore) GetUserByID(ctx context.Context, id int) (*domain.User, error) { + query := `SELECT id, email, full_name, password_hash, global_role FROM users WHERE id = ?` + + var user domain.User + err := s.DB.QueryRowContext(ctx, query, id).Scan( + &user.ID, + &user.Email, + &user.FullName, + &user.PasswordHash, + &user.GlobalRole, + ) + + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ErrNotFound + } + return nil, err + } + + return &user, nil +} + +// UpdateUserPassword allows an administrator to overwrite a forgotten password +func (s *SQLiteStore) UpdateUserPassword(ctx context.Context, id int, newPasswordHash string) error { + query := `UPDATE users SET password_hash = ? 
WHERE id = ?` + + _, err := s.DB.ExecContext(ctx, query, newPasswordHash, id) + return err +} + +// UpdateUserRole promotes or demotes a user by updating their global_role. +func (s *SQLiteStore) UpdateUserRole(ctx context.Context, id int, newRole string) error { + query := `UPDATE users SET global_role = ? WHERE id = ?` + + _, err := s.DB.ExecContext(ctx, query, newRole, id) + return err +} + +// DeactivateUserAndReassign securely offboards a user, kicks them out +func (s *SQLiteStore) DeactivateUserAndReassign(ctx context.Context, userID int) error { + var email string + if err := s.DB.QueryRowContext(ctx, "SELECT email FROM users WHERE id = ?", userID).Scan(&email); err != nil { + return err + } + + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + _, err = tx.ExecContext(ctx, `UPDATE users SET is_active = 0 WHERE id = ?`, userID) + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, `DELETE FROM ticket_assignments WHERE assignee = ?`, email) + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, `DELETE FROM sessions WHERE user_id = ?`, userID) + if err != nil { + return err + } + + return tx.Commit() +} + +// GetUserCount returns the total number of registered users in the system. 
+func (s *SQLiteStore) GetUserCount(ctx context.Context) (int, error) { + var count int + err := s.DB.QueryRowContext(ctx, `SELECT COUNT(*) FROM users`).Scan(&count) + if err != nil { + return 0, err + } + return count, nil +} + +func (s *SQLiteStore) GetAllUsers(ctx context.Context) ([]*domain.User, error) { + // Notice the return type is now []*domain.User + rows, err := s.DB.QueryContext(ctx, "SELECT id, email, full_name, global_role FROM users WHERE is_active = 1") + if err != nil { + return nil, err + } + defer rows.Close() + + var users []*domain.User + for rows.Next() { + var u domain.User + if err := rows.Scan(&u.ID, &u.Email, &u.FullName, &u.GlobalRole); err == nil { + users = append(users, &u) // ๐Ÿš€ Appending the memory address! + } + } + return users, nil +} + +// DeleteSession removes the token from the database so it can never be used again. +func (s *SQLiteStore) DeleteSession(ctx context.Context, token string) error { + _, err := s.DB.ExecContext(ctx, `DELETE FROM sessions WHERE token = ?`, token) + return err +} diff --git a/pkg/datastore/auth_db_test.go b/pkg/datastore/auth_db_test.go new file mode 100644 index 0000000..462f3b1 --- /dev/null +++ b/pkg/datastore/auth_db_test.go @@ -0,0 +1,73 @@ +package datastore + +import ( + "context" + "testing" + "time" +) + +func TestUserAndSessionLifecycle(t *testing.T) { + store := setupTestDB(t) + defer store.DB.Close() + + _, err := store.DB.Exec(` + CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, email TEXT UNIQUE, full_name TEXT, password_hash TEXT, global_role TEXT, is_active BOOLEAN DEFAULT 1); + CREATE TABLE sessions (session_token TEXT PRIMARY KEY, user_id INTEGER, expires_at DATETIME); + `) + + ctx := context.Background() + + user, err := store.CreateUser(ctx, "admin@RiskRancher.com", "doc", "fake_bcrypt_hash", "Admin") + if err != nil { + t.Fatalf("Failed to create user: %v", err) + } + if user.ID == 0 { + t.Errorf("Expected database to return a valid auto-incremented ID, got 0") + } + + 
_, err = store.CreateUser(ctx, "admin@RiskRancher.com", "doc", "another_hash", "Analyst") + if err == nil { + t.Fatalf("Security Failure: Database allowed a duplicate email address!") + } + + fetchedUser, err := store.GetUserByEmail(ctx, "admin@RiskRancher.com") + if err != nil { + t.Fatalf("Failed to fetch user by email: %v", err) + } + if fetchedUser.GlobalRole != "Admin" { + t.Errorf("Expected role 'Admin', got '%s'", fetchedUser.GlobalRole) + } + + expires := time.Now().Add(24 * time.Hour) + err = store.CreateSession(ctx, "fake_secure_token", user.ID, expires) + if err != nil { + t.Fatalf("Failed to create session: %v", err) + } + + session, err := store.GetSession(ctx, "fake_secure_token") + if err != nil { + t.Fatalf("Failed to retrieve session: %v", err) + } + if session.UserID != user.ID { + t.Errorf("Session mapped to wrong user! Expected %d, got %d", user.ID, session.UserID) + } + + userByID, err := store.GetUserByID(ctx, user.ID) + if err != nil { + t.Fatalf("Failed to fetch user by ID: %v", err) + } + if userByID.Email != user.Email { + t.Errorf("GetUserByID returned wrong user. 
// runChaosEngine fires 100 concurrent workers (1 bulk inserter, 20 updaters,
// 79 readers) at the provided database connection and returns the number of
// operations that reported an error.
//
// Fixes over the previous version:
//   - db.Begin errors are no longer discarded; a failed Begin previously
//     produced a nil tx and panicked on tx.Exec.
//   - errCh is sized for the worst case (every operation failing). The old
//     1000-slot buffer could fill, block senders forever, and deadlock
//     wg.Wait() since nothing drains the channel until after Wait returns.
func runChaosEngine(db *sql.DB) int {
	// Best-effort setup; failures here surface as errors in the workers below.
	db.Exec(`CREATE TABLE IF NOT EXISTS tickets (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT, status TEXT)`)
	db.Exec(`INSERT INTO tickets (title, status) VALUES ('Seed', 'Open')`)

	var wg sync.WaitGroup
	// Worst case: 20 begin/commit failures + 20*20 updates + 79*50 reads.
	errCh := make(chan error, 20+20*20+79*50)

	// One heavy writer doing batched inserts in transactions.
	wg.Add(1)
	go func() {
		defer wg.Done()
		for i := 0; i < 20; i++ {
			tx, err := db.Begin()
			if err != nil {
				errCh <- err
				continue
			}
			for j := 0; j < 50; j++ {
				tx.Exec(`INSERT INTO tickets (title, status) VALUES ('Vuln', 'Open')`)
			}
			if err := tx.Commit(); err != nil {
				errCh <- err
			}
		}
	}()

	// 20 updaters hammering the same row.
	for w := 0; w < 20; w++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for i := 0; i < 20; i++ {
				if _, err := db.Exec(`UPDATE tickets SET status = 'Patched' WHERE id = 1`); err != nil {
					errCh <- err
				}
			}
		}()
	}

	// 79 readers issuing aggregate queries.
	for r := 0; r < 79; r++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for i := 0; i < 50; i++ {
				rows, err := db.Query(`SELECT COUNT(*) FROM tickets`)
				if err != nil {
					errCh <- err
				} else {
					rows.Close()
				}
			}
		}()
	}

	wg.Wait()
	close(errCh)

	errorCount := 0
	for range errCh {
		errorCount++
	}
	return errorCount
}
tempDir := t.TempDir() + dbPath := filepath.Join(tempDir, "tuned.db") + + dsn := fmt.Sprintf("%s?_journal_mode=WAL&_synchronous=NORMAL&_busy_timeout=5000", dbPath) + db, err := sql.Open("sqlite3", dsn) + if err != nil { + t.Fatalf("Failed to open tuned DB: %v", err) + } + defer db.Close() + + db.SetMaxOpenConns(25) + db.SetMaxIdleConns(25) + + errors := runChaosEngine(db) + + if errors > 0 { + t.Fatalf("FAILED! Tuned engine threw %d errors. It should have queued them perfectly.", errors) + } + t.Log("SUCCESS: 100 concurrent workers survived SQLite chaos with ZERO locked errors.") +} diff --git a/pkg/datastore/db.go b/pkg/datastore/db.go new file mode 100644 index 0000000..3aa9c3f --- /dev/null +++ b/pkg/datastore/db.go @@ -0,0 +1,94 @@ +package datastore + +import ( + "database/sql" + "embed" + _ "embed" + "encoding/json" + "log" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" + _ "github.com/mattn/go-sqlite3" +) + +//go:embed schema.sql +var schemaSQL string + +//go:embed defaults/*.json +var defaultAdaptersFS embed.FS + +func InitDB(filepath string) *sql.DB { + dsn := "file:" + filepath + "?_journal=WAL&_timeout=5000&_sync=1&_fk=1" + + db, err := sql.Open("sqlite3", dsn) + if err != nil { + log.Fatalf("Failed to open database: %v", err) + } + + db.SetMaxOpenConns(25) + db.SetMaxIdleConns(25) + db.SetConnMaxLifetime(5 * time.Minute) + + migrations := []string{ + schemaSQL, + } + + if err := RunMigrations(db, migrations); err != nil { + log.Fatalf("Database upgrade failed! 
Halting boot to protect data: %v", err) + } + + SeedAdapters(db) + + return db +} + +// SeedAdapters reads the embedded JSON files and UPSERTs them into SQLite +func SeedAdapters(db *sql.DB) { + files, err := defaultAdaptersFS.ReadDir("defaults") + if err != nil { + log.Printf("No default adapters found or failed to read: %v", err) + return + } + + for _, file := range files { + data, err := defaultAdaptersFS.ReadFile("defaults/" + file.Name()) + if err != nil { + log.Printf("Failed to read adapter file %s: %v", file.Name(), err) + continue + } + + var adapter domain.Adapter + if err := json.Unmarshal(data, &adapter); err != nil { + log.Printf("Failed to parse adapter JSON %s: %v", file.Name(), err) + continue + } + + query := ` + INSERT INTO data_adapters ( + name, source_name, findings_path, mapping_title, + mapping_asset, mapping_severity, mapping_description, mapping_remediation + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(name) DO UPDATE SET + source_name = excluded.source_name, + findings_path = excluded.findings_path, + mapping_title = excluded.mapping_title, + mapping_asset = excluded.mapping_asset, + mapping_severity = excluded.mapping_severity, + mapping_description = excluded.mapping_description, + mapping_remediation = excluded.mapping_remediation, + updated_at = CURRENT_TIMESTAMP; + ` + + _, err = db.Exec(query, + adapter.Name, adapter.SourceName, adapter.FindingsPath, adapter.MappingTitle, + adapter.MappingAsset, adapter.MappingSeverity, adapter.MappingDescription, adapter.MappingRemediation, + ) + + if err != nil { + log.Printf("Failed to seed adapter %s to DB: %v", adapter.Name, err) + } else { + log.Printf("๐Ÿ”Œ Successfully loaded adapter: %s", adapter.Name) + } + } +} diff --git a/pkg/datastore/defaults/trivy.json b/pkg/datastore/defaults/trivy.json new file mode 100644 index 0000000..7a0bdf5 --- /dev/null +++ b/pkg/datastore/defaults/trivy.json @@ -0,0 +1,10 @@ +{ + "name": "Trivy Container Scan", + "source_name": "Trivy", + 
"findings_path": "Results.0.Vulnerabilities", + "mapping_title": "VulnerabilityID", + "mapping_asset": "PkgName", + "mapping_severity": "Severity", + "mapping_description": "Title", + "mapping_remediation": "FixedVersion" +} \ No newline at end of file diff --git a/pkg/datastore/diff_test.go b/pkg/datastore/diff_test.go new file mode 100644 index 0000000..847c665 --- /dev/null +++ b/pkg/datastore/diff_test.go @@ -0,0 +1,84 @@ +package datastore + +import ( + "context" + "database/sql" + "testing" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" + _ "github.com/mattn/go-sqlite3" // We need the SQLite driver for the test +) + +func setupTestDB(t *testing.T) *SQLiteStore { + db, err := sql.Open("sqlite3", ":memory:") + if err != nil { + t.Fatalf("Failed to open in-memory SQLite database: %v", err) + } + + store := &SQLiteStore{DB: db} + return store +} + +func TestIngestionDiffEngine(t *testing.T) { + store := setupTestDB(t) + defer store.DB.Close() + _, err := store.DB.Exec(` + CREATE TABLE IF NOT EXISTS sla_policies (domain TEXT, severity TEXT, days_to_remediate INTEGER, max_extensions INTEGER, days_to_triage INTEGER); + CREATE TABLE IF NOT EXISTS routing_rules (id INTEGER, rule_type TEXT, match_value TEXT, assignee TEXT, role TEXT); + CREATE TABLE IF NOT EXISTS ticket_assignments (ticket_id INTEGER, assignee TEXT, role TEXT); + CREATE TABLE IF NOT EXISTS ticket_activity (ticket_id INTEGER, actor TEXT, activity_type TEXT, new_value TEXT); + + CREATE TABLE IF NOT EXISTS tickets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + source TEXT, + asset_identifier TEXT, + title TEXT, + severity TEXT, + description TEXT, + status TEXT, + dedupe_hash TEXT UNIQUE, + patched_at DATETIME, + domain TEXT, + triage_due_date DATETIME, + remediation_due_date DATETIME + )`) + + if err != nil { + t.Fatalf("Failed to create schema: %v", err) + } + + store.DB.Exec(`INSERT INTO tickets (source, asset_identifier, title, severity, description, status, dedupe_hash) VALUES + ('Trivy', 
// RunMigrations ensures the database schema matches the binary version.
// Migrations are applied in order, each inside its own transaction, and
// already-applied versions (tracked in schema_migrations) are skipped,
// making the function safe to call on every boot.
func RunMigrations(db *sql.DB, migrations []string) error {
	if _, err := db.Exec(`
	CREATE TABLE IF NOT EXISTS schema_migrations (
		version INTEGER PRIMARY KEY,
		applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
	)
	`); err != nil {
		return fmt.Errorf("failed to create schema_migrations table: %v", err)
	}

	// Highest version already applied; 0 when the table is empty.
	var applied int
	if err := db.QueryRow("SELECT IFNULL(MAX(version), 0) FROM schema_migrations").Scan(&applied); err != nil && err != sql.ErrNoRows {
		return fmt.Errorf("failed to read current schema version: %v", err)
	}

	for idx, stmt := range migrations {
		version := idx + 1
		if version <= applied {
			continue
		}

		log.Printf("🚀 Applying database migration v%d...", version)

		// One transaction per migration so a failed ALTER TABLE rolls back cleanly.
		tx, err := db.Begin()
		if err != nil {
			return err
		}

		if _, err := tx.Exec(stmt); err != nil {
			tx.Rollback()
			return fmt.Errorf("migration v%d failed: %v", version, err)
		}

		if _, err := tx.Exec("INSERT INTO schema_migrations (version) VALUES (?)", version); err != nil {
			tx.Rollback()
			return fmt.Errorf("failed to record migration v%d: %v", version, err)
		}

		if err := tx.Commit(); err != nil {
			return err
		}

		log.Printf("✅ Migration v%d applied successfully.", version)
	}

	return nil
}
version int + db.QueryRow("SELECT MAX(version) FROM schema_migrations").Scan(&version) + if version != 2 { + t.Errorf("Expected database to be at version 2, got %d", version) + } + + err = RunMigrations(db, migrations) + if err != nil { + t.Fatalf("Idempotent migration failed: %v", err) + } + + _, err = db.Exec("INSERT INTO users (name, email) VALUES ('Tim', 'tim@ranch.com')") + if err != nil { + t.Errorf("Migration 2 did not apply correctly! Column 'email' missing: %v", err) + } +} diff --git a/pkg/datastore/schema.sql b/pkg/datastore/schema.sql new file mode 100644 index 0000000..a39bc54 --- /dev/null +++ b/pkg/datastore/schema.sql @@ -0,0 +1,147 @@ +CREATE TABLE IF NOT EXISTS app_config ( + id INTEGER PRIMARY KEY CHECK (id = 1), + timezone TEXT DEFAULT 'America/New_York', + business_start INTEGER DEFAULT 9, + business_end INTEGER DEFAULT 17, + default_extension_days INTEGER DEFAULT 30, + backup_enabled BOOLEAN DEFAULT 1, + backup_interval_hours INTEGER DEFAULT 24, + backup_retention_days INTEGER DEFAULT 30 + ); + +INSERT OR IGNORE INTO app_config (id) VALUES (1); + +CREATE TABLE IF NOT EXISTS domains (name TEXT PRIMARY KEY); +INSERT OR IGNORE INTO domains (name) VALUES ('Vulnerability'), ('Privacy'), ('Compliance'), ('Incident'); + +CREATE TABLE IF NOT EXISTS departments (name TEXT PRIMARY KEY); +INSERT OR IGNORE INTO departments (name) VALUES ('Security'), ('IT'), ('Privacy'), ('Legal'), ('Compliance'); + +CREATE TABLE IF NOT EXISTS sla_policies ( + domain TEXT NOT NULL, + severity TEXT NOT NULL, + days_to_triage INTEGER NOT NULL DEFAULT 3, + days_to_remediate INTEGER NOT NULL, + max_extensions INTEGER NOT NULL DEFAULT 3, + PRIMARY KEY (domain, severity), + FOREIGN KEY(domain) REFERENCES domains(name) ON DELETE CASCADE + ); + +INSERT OR IGNORE INTO sla_policies (domain, severity, days_to_triage, days_to_remediate, max_extensions) VALUES + ('Vulnerability', 'Critical', 3, 14, 1), ('Vulnerability', 'High', 3, 30, 2), + ('Privacy', 'Critical', 3, 3, 0), 
-- Tickets: the core work items, one row per deduplicated finding.
--
-- Fix: the status CHECK list must cover every status the Go layer writes or
-- queries. GetSheriffAnalytics and GetDashboardTickets filter on
-- 'Risk Accepted', 'Pending Risk Approval' and 'Pending Verification', and
-- the analytics queries read is_cisa_kev; the previous schema rejected those
-- statuses and never declared the column, so those queries could never match.
CREATE TABLE IF NOT EXISTS tickets (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    domain TEXT NOT NULL DEFAULT 'Vulnerability',
    source TEXT NOT NULL DEFAULT 'Manual',
    asset_identifier TEXT NOT NULL DEFAULT 'Default',
    cve_id TEXT,
    audit_id TEXT UNIQUE,
    compliance_tags TEXT,
    title TEXT NOT NULL,
    description TEXT,
    recommended_remediation TEXT,
    severity TEXT NOT NULL,
    status TEXT DEFAULT 'Waiting to be Triaged'
        CHECK(status IN (
            'Waiting to be Triaged',
            'Returned to Security',
            'Triaged',
            'Assigned Out',
            'Pending Verification',
            'Pending Risk Approval',
            'Patched',
            'Risk Accepted',
            'False Positive'
        )),
    dedupe_hash TEXT UNIQUE NOT NULL,
    patch_evidence TEXT,
    accessible_to_internet BOOLEAN DEFAULT 0,
    -- Flag set when the CVE appears in the CISA Known Exploited Vulns catalog.
    is_cisa_kev BOOLEAN DEFAULT 0,
    assignee TEXT DEFAULT 'Unassigned',
    latest_comment TEXT DEFAULT '',

    assigned_at DATETIME,
    owner_viewed_at DATETIME,
    triage_due_date DATETIME,
    remediation_due_date DATETIME,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    patched_at DATETIME,
    FOREIGN KEY(domain) REFERENCES domains(name) ON DELETE SET DEFAULT
);
idx_tickets_domain ON tickets(domain); +CREATE INDEX IF NOT EXISTS idx_tickets_source_asset ON tickets(source, asset_identifier); + +CREATE TABLE IF NOT EXISTS ticket_assignments ( + ticket_id INTEGER NOT NULL, + assignee TEXT NOT NULL, + role TEXT NOT NULL CHECK(role IN ('RangeHand', 'Wrangler', 'Magistrate')), + PRIMARY KEY (ticket_id, assignee, role), + FOREIGN KEY(ticket_id) REFERENCES tickets(id) ON DELETE CASCADE + ); + +CREATE TABLE IF NOT EXISTS data_adapters ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE, + source_name TEXT NOT NULL, + findings_path TEXT NOT NULL DEFAULT '.', + mapping_title TEXT NOT NULL, + mapping_asset TEXT NOT NULL, + mapping_severity TEXT NOT NULL, + mapping_description TEXT, + mapping_remediation TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP +); + +CREATE TABLE IF NOT EXISTS sync_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + source TEXT NOT NULL, + status TEXT NOT NULL, + records_processed INTEGER NOT NULL, + error_message TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP +); + +CREATE TABLE IF NOT EXISTS draft_tickets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + report_id TEXT NOT NULL, + title TEXT DEFAULT '', + description TEXT, + severity TEXT DEFAULT 'Medium', + asset_identifier TEXT DEFAULT '', + recommended_remediation TEXT DEFAULT '', + created_at DATETIME DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_draft_tickets_report_id ON draft_tickets(report_id); + +CREATE INDEX IF NOT EXISTS idx_assignments_assignee ON ticket_assignments(assignee); + +CREATE INDEX IF NOT EXISTS idx_tickets_status_asset ON tickets(status, asset_identifier); +CREATE INDEX IF NOT EXISTS idx_tickets_updated_at ON tickets(updated_at); + +CREATE INDEX IF NOT EXISTS idx_tickets_analytics ON tickets(status, severity, source); +CREATE INDEX IF NOT EXISTS idx_tickets_due_dates ON tickets(status, remediation_due_date, triage_due_date); +CREATE INDEX IF NOT 
EXISTS idx_tickets_source_status ON tickets(source, status); \ No newline at end of file diff --git a/pkg/datastore/sqlite.go b/pkg/datastore/sqlite.go new file mode 100644 index 0000000..82f78f6 --- /dev/null +++ b/pkg/datastore/sqlite.go @@ -0,0 +1,17 @@ +package datastore + +import ( + "database/sql" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +type SQLiteStore struct { + DB *sql.DB +} + +var _ domain.TicketStore = (*SQLiteStore)(nil) + +func NewSQLiteStore(db *sql.DB) *SQLiteStore { + return &SQLiteStore{DB: db} +} diff --git a/pkg/datastore/sqlite_admin.go b/pkg/datastore/sqlite_admin.go new file mode 100644 index 0000000..6f9549a --- /dev/null +++ b/pkg/datastore/sqlite_admin.go @@ -0,0 +1,173 @@ +package datastore + +import ( + "context" + "time" + + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (s *SQLiteStore) UpdateAppConfig(ctx context.Context, config domain2.AppConfig) error { + query := ` + INSERT INTO app_config (id, timezone, business_start, business_end, default_extension_days) + VALUES (1, ?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + timezone = excluded.timezone, + business_start = excluded.business_start, + business_end = excluded.business_end, + default_extension_days = excluded.default_extension_days + ` + _, err := s.DB.ExecContext(ctx, query, config.Timezone, config.BusinessStart, config.BusinessEnd, config.DefaultExtensionDays) + return err +} + +func (s *SQLiteStore) GetAppConfig(ctx context.Context) (domain2.AppConfig, error) { + var c domain2.AppConfig + + query := `SELECT timezone, business_start, business_end, default_extension_days, + backup_enabled, backup_interval_hours, backup_retention_days + FROM app_config WHERE id = 1` + + err := s.DB.QueryRowContext(ctx, query).Scan( + &c.Timezone, &c.BusinessStart, &c.BusinessEnd, &c.DefaultExtensionDays, + &c.Backup.Enabled, &c.Backup.IntervalHours, &c.Backup.RetentionDays, + ) + return c, err +} + +// buildSLAMap creates a fast 2D lookup table: map[Domain][Severity]Policy +func (s *SQLiteStore) buildSLAMap(ctx context.Context) (map[string]map[string]domain2.SLAPolicy, error) { + policies, err := s.GetSLAPolicies(ctx) + if err != nil { + return nil, err + } + + slaMap := make(map[string]map[string]domain2.SLAPolicy) + for _, p := range policies { + if slaMap[p.Domain] == nil { + slaMap[p.Domain] = make(map[string]domain2.SLAPolicy) + } + slaMap[p.Domain][p.Severity] = p + } + return slaMap, nil +} + +func (s *SQLiteStore) ExportSystemState(ctx context.Context) (domain2.ExportState, error) { + var state domain2.ExportState + state.Version = "1.1" + state.ExportedAt = time.Now().UTC().Format(time.RFC3339) + + config, err := s.GetAppConfig(ctx) + if err == nil { + state.AppConfig = config + } + + slas, err := s.GetSLAPolicies(ctx) + if err == nil { + state.SLAPolicies = slas + } + + users, err := s.GetAllUsers(ctx) + if err == nil { + for _, u := range users { + u.PasswordHash = "" + state.Users = append(state.Users, *u) + } + } + + adapters, err := s.GetAdapters(ctx) + if err == nil { + state.Adapters = 
adapters + } + + query := `SELECT id, domain, source, asset_identifier, title, COALESCE(description, ''), severity, status, dedupe_hash, created_at FROM tickets` + rows, err := s.DB.QueryContext(ctx, query) + if err != nil { + return state, err + } + defer rows.Close() + + for rows.Next() { + var t domain2.Ticket + if err := rows.Scan(&t.ID, &t.Domain, &t.Source, &t.AssetIdentifier, &t.Title, &t.Description, &t.Severity, &t.Status, &t.DedupeHash, &t.CreatedAt); err == nil { + state.Tickets = append(state.Tickets, t) + } + } + + return state, nil +} + +func (s *SQLiteStore) UpdateBackupPolicy(ctx context.Context, policy domain2.BackupPolicy) error { + _, err := s.DB.ExecContext(ctx, ` + UPDATE app_config + SET backup_enabled = ?, backup_interval_hours = ?, backup_retention_days = ? + WHERE id = 1`, + policy.Enabled, policy.IntervalHours, policy.RetentionDays) + return err +} + +func (s *SQLiteStore) GetSLAPolicies(ctx context.Context) ([]domain2.SLAPolicy, error) { + rows, err := s.DB.QueryContext(ctx, "SELECT domain, severity, days_to_remediate, max_extensions, days_to_triage FROM sla_policies ORDER BY domain, severity") + if err != nil { + return nil, err + } + defer rows.Close() + + var policies []domain2.SLAPolicy + for rows.Next() { + var p domain2.SLAPolicy + rows.Scan(&p.Domain, &p.Severity, &p.DaysToRemediate, &p.MaxExtensions, &p.DaysToTriage) + policies = append(policies, p) + } + return policies, nil +} + +func (s *SQLiteStore) UpdateSLAPolicies(ctx context.Context, slas []domain2.SLAPolicy) error { + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + stmt, err := tx.PrepareContext(ctx, ` + UPDATE sla_policies + SET days_to_triage = ?, days_to_remediate = ?, max_extensions = ? + WHERE domain = ? 
AND severity = ?`) + if err != nil { + return err + } + defer stmt.Close() + + for _, sla := range slas { + _, err = stmt.ExecContext(ctx, sla.DaysToTriage, sla.DaysToRemediate, sla.MaxExtensions, sla.Domain, sla.Severity) + if err != nil { + return err + } + } + + return tx.Commit() +} + +func (s *SQLiteStore) GetWranglers(ctx context.Context) ([]domain2.User, error) { + query := ` + SELECT id, email, full_name, global_role, is_active, created_at + FROM users + WHERE global_role = 'Wrangler' AND is_active = 1 + ORDER BY email ASC + ` + rows, err := s.DB.QueryContext(ctx, query) + if err != nil { + return nil, err + } + defer rows.Close() + + var wranglers []domain2.User + for rows.Next() { + var w domain2.User + if err := rows.Scan(&w.ID, &w.Email, &w.FullName, &w.GlobalRole, &w.IsActive, &w.CreatedAt); err != nil { + return nil, err + } + wranglers = append(wranglers, w) + } + return wranglers, nil +} diff --git a/pkg/datastore/sqlite_analytics.go b/pkg/datastore/sqlite_analytics.go new file mode 100644 index 0000000..8ff7d6a --- /dev/null +++ b/pkg/datastore/sqlite_analytics.go @@ -0,0 +1,357 @@ +package datastore + +import ( + "context" + "fmt" + "time" + + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (s *SQLiteStore) GetSheriffAnalytics(ctx context.Context) (domain2.SheriffAnalytics, error) { + var metrics domain2.SheriffAnalytics + + s.DB.QueryRowContext(ctx, "SELECT COUNT(*) FROM tickets WHERE is_cisa_kev = 1 AND status NOT IN ('Patched', 'Risk Accepted', 'False Positive')").Scan(&metrics.ActiveKEVs) + s.DB.QueryRowContext(ctx, "SELECT COUNT(*) FROM tickets WHERE severity = 'Critical' AND status NOT IN ('Patched', 'Risk Accepted', 'False Positive')").Scan(&metrics.OpenCriticals) + s.DB.QueryRowContext(ctx, "SELECT COUNT(*) FROM tickets WHERE remediation_due_date < CURRENT_TIMESTAMP AND status NOT IN ('Patched', 'Risk Accepted', 'False Positive')").Scan(&metrics.TotalOverdue) + + mttrQuery := ` + SELECT 
COALESCE(AVG(julianday(t.patched_at) - julianday(t.created_at)), 0) + FROM tickets t + WHERE t.status = 'Patched' + ` + var mttrFloat float64 + s.DB.QueryRowContext(ctx, mttrQuery).Scan(&mttrFloat) + metrics.GlobalMTTRDays = int(mttrFloat) + + sourceQuery := ` + SELECT + t.source, + SUM(CASE WHEN t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive') THEN 1 ELSE 0 END) as total_open, + SUM(CASE WHEN t.severity = 'Critical' AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive') THEN 1 ELSE 0 END) as criticals, + SUM(CASE WHEN t.is_cisa_kev = 1 AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive') THEN 1 ELSE 0 END) as cisa_kevs, + SUM(CASE WHEN t.status = 'Waiting to be Triaged' THEN 1 ELSE 0 END) as untriaged, + SUM(CASE WHEN t.remediation_due_date < CURRENT_TIMESTAMP AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive') THEN 1 ELSE 0 END) as patch_overdue, + SUM(CASE WHEN t.status = 'Pending Risk Approval' THEN 1 ELSE 0 END) as pending_risk, + + SUM(CASE WHEN t.status IN ('Patched', 'Risk Accepted', 'False Positive') THEN 1 ELSE 0 END) as total_closed, + SUM(CASE WHEN t.status = 'Patched' THEN 1 ELSE 0 END) as patched, + SUM(CASE WHEN t.status = 'Risk Accepted' THEN 1 ELSE 0 END) as risk_accepted, + SUM(CASE WHEN t.status = 'False Positive' THEN 1 ELSE 0 END) as false_positive + FROM tickets t + GROUP BY t.source + ORDER BY criticals DESC, patch_overdue DESC + ` + rows, err := s.DB.QueryContext(ctx, sourceQuery) + if err == nil { + defer rows.Close() + for rows.Next() { + var sm domain2.SourceMetrics + rows.Scan(&sm.Source, &sm.TotalOpen, &sm.Criticals, &sm.CisaKEVs, &sm.Untriaged, &sm.PatchOverdue, &sm.PendingRisk, &sm.TotalClosed, &sm.Patched, &sm.RiskAccepted, &sm.FalsePositive) + + topAssigneeQ := ` + SELECT COALESCE(ta.assignee, 'Unassigned'), COUNT(t.id) as c + FROM tickets t LEFT JOIN ticket_assignments ta ON t.id = ta.ticket_id + WHERE t.source = ? 
AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive') + GROUP BY ta.assignee ORDER BY c DESC LIMIT 1` + + var assignee string + var count int + s.DB.QueryRowContext(ctx, topAssigneeQ, sm.Source).Scan(&assignee, &count) + if count > 0 { + sm.TopAssignee = fmt.Sprintf("%s (%d)", assignee, count) + } else { + sm.TopAssignee = "N/A" + } + + if sm.PatchOverdue > 0 { + sm.StrategicNote = "๐Ÿšจ SLA Breach (Escalate to IT Managers)" + } else if sm.Untriaged > 0 { + sm.StrategicNote = "โš ๏ธ Triage Bottleneck (Check Analysts)" + } else if sm.PendingRisk > 0 { + sm.StrategicNote = "โš–๏ธ Blocked by Exec Adjudication" + } else if sm.Criticals > 0 { + sm.StrategicNote = "๐Ÿ”ฅ High Risk (Monitor closely)" + } else if sm.RiskAccepted > sm.Patched && sm.TotalClosed > 0 { + sm.StrategicNote = "๐Ÿ‘€ High Risk Acceptance Rate (Audit Required)" + } else if sm.FalsePositive > sm.Patched && sm.TotalClosed > 0 { + sm.StrategicNote = "๐Ÿ”ง Noisy Source (Scanner needs tuning)" + } else if sm.TotalClosed > 0 { + sm.StrategicNote = "โœ… Healthy Resolution Velocity" + } else { + sm.StrategicNote = "โœ… Routine Processing" + } + + metrics.SourceHealth = append(metrics.SourceHealth, sm) + } + } + + sevQuery := `SELECT severity, COUNT(id) FROM tickets WHERE status NOT IN ('Patched', 'Risk Accepted', 'False Positive') GROUP BY severity` + rowsSev, err := s.DB.QueryContext(ctx, sevQuery) + if err == nil { + defer rowsSev.Close() + for rowsSev.Next() { + var sev string + var count int + rowsSev.Scan(&sev, &count) + metrics.Severity.Total += count + switch sev { + case "Critical": + metrics.Severity.Critical = count + case "High": + metrics.Severity.High = count + case "Medium": + metrics.Severity.Medium = count + case "Low": + metrics.Severity.Low = count + case "Info": + metrics.Severity.Info = count + } + } + if metrics.Severity.Total > 0 { + metrics.Severity.CritPct = int((float64(metrics.Severity.Critical) / float64(metrics.Severity.Total)) * 100) + metrics.Severity.HighPct = 
int((float64(metrics.Severity.High) / float64(metrics.Severity.Total)) * 100) + metrics.Severity.MedPct = int((float64(metrics.Severity.Medium) / float64(metrics.Severity.Total)) * 100) + metrics.Severity.LowPct = int((float64(metrics.Severity.Low) / float64(metrics.Severity.Total)) * 100) + metrics.Severity.InfoPct = int((float64(metrics.Severity.Info) / float64(metrics.Severity.Total)) * 100) + } + } + + resQuery := `SELECT status, COUNT(id) FROM tickets WHERE status IN ('Patched', 'Risk Accepted', 'False Positive') GROUP BY status` + rowsRes, err := s.DB.QueryContext(ctx, resQuery) + if err == nil { + defer rowsRes.Close() + for rowsRes.Next() { + var status string + var count int + rowsRes.Scan(&status, &count) + metrics.Resolution.Total += count + + switch status { + case "Patched": + metrics.Resolution.Patched = count + case "Risk Accepted": + metrics.Resolution.RiskAccepted = count + case "False Positive": + metrics.Resolution.FalsePositive = count + } + } + + if metrics.Resolution.Total > 0 { + metrics.Resolution.PatchedPct = int((float64(metrics.Resolution.Patched) / float64(metrics.Resolution.Total)) * 100) + metrics.Resolution.RiskAccPct = int((float64(metrics.Resolution.RiskAccepted) / float64(metrics.Resolution.Total)) * 100) + metrics.Resolution.FalsePosPct = int((float64(metrics.Resolution.FalsePositive) / float64(metrics.Resolution.Total)) * 100) + } + } + + assetQuery := `SELECT asset_identifier, COUNT(id) as c FROM tickets WHERE status NOT IN ('Patched', 'Risk Accepted', 'False Positive') GROUP BY asset_identifier ORDER BY c DESC LIMIT 5` + rowsAsset, err := s.DB.QueryContext(ctx, assetQuery) + if err == nil { + defer rowsAsset.Close() + var maxAssetCount int + for rowsAsset.Next() { + var am domain2.AssetMetric + rowsAsset.Scan(&am.Asset, &am.Count) + if maxAssetCount == 0 { + maxAssetCount = am.Count + } + if maxAssetCount > 0 { + am.Percentage = int((float64(am.Count) / float64(maxAssetCount)) * 100) + } + metrics.TopAssets = 
append(metrics.TopAssets, am) + } + } + + return metrics, nil +} + +func (s *SQLiteStore) GetDashboardTickets(ctx context.Context, tabStatus, filter, assetFilter, userEmail, userRole string, limit, offset int) ([]domain2.Ticket, int, map[string]int, error) { + metrics := map[string]int{ + "critical": 0, + "overdue": 0, + "mine": 0, + "verification": 0, + "returned": 0, + } + + scope := "" + var scopeArgs []any + + if userRole == "Wrangler" { + scope = ` AND LOWER(t.assignee) = LOWER(?)` + scopeArgs = append(scopeArgs, userEmail) + } + + if userRole != "Sheriff" { + var critCount, overCount, mineCount, verifyCount, returnedCount int + + critQ := "SELECT COUNT(t.id) FROM tickets t WHERE t.severity = 'Critical' AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive')" + scope + s.DB.QueryRowContext(ctx, critQ, scopeArgs...).Scan(&critCount) + metrics["critical"] = critCount + + overQ := "SELECT COUNT(t.id) FROM tickets t WHERE t.remediation_due_date < CURRENT_TIMESTAMP AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive')" + scope + s.DB.QueryRowContext(ctx, overQ, scopeArgs...).Scan(&overCount) + metrics["overdue"] = overCount + + mineQ := "SELECT COUNT(t.id) FROM tickets t WHERE LOWER(t.assignee) = LOWER(?) AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive')" + s.DB.QueryRowContext(ctx, mineQ, userEmail).Scan(&mineCount) + metrics["mine"] = mineCount + + verifyQ := "SELECT COUNT(t.id) FROM tickets t WHERE t.status = 'Pending Verification'" + scope + s.DB.QueryRowContext(ctx, verifyQ, scopeArgs...).Scan(&verifyCount) + metrics["verification"] = verifyCount + + retQ := "SELECT COUNT(t.id) FROM tickets t WHERE t.status = 'Returned to Security'" + scope + s.DB.QueryRowContext(ctx, retQ, scopeArgs...).Scan(&returnedCount) + metrics["returned"] = returnedCount + } + + baseQ := "FROM tickets t WHERE 1=1" + scope + var args []any + args = append(args, scopeArgs...) + + if assetFilter != "" { + baseQ += " AND t.asset_identifier = ?" 
+ args = append(args, assetFilter) + } + + if tabStatus == "Waiting to be Triaged" || tabStatus == "holding_pen" { + baseQ += " AND t.status IN ('Waiting to be Triaged', 'Returned to Security', 'Triaged')" + } else if tabStatus == "Exceptions" { + baseQ += " AND t.status NOT IN ('Patched', 'Risk Accepted', 'False Positive')" + } else if tabStatus == "archives" { + baseQ += " AND t.status IN ('Patched', 'Risk Accepted', 'False Positive')" + } else if tabStatus != "" { + baseQ += " AND t.status = ?" + args = append(args, tabStatus) + } + + if filter == "critical" { + baseQ += " AND t.severity = 'Critical'" + } else if filter == "overdue" { + baseQ += " AND t.remediation_due_date < CURRENT_TIMESTAMP" + } else if filter == "mine" { + baseQ += " AND LOWER(t.assignee) = LOWER(?)" + args = append(args, userEmail) + } else if tabStatus == "archives" && filter != "" && filter != "all" { + baseQ += " AND t.status = ?" + args = append(args, filter) + } + + var total int + s.DB.QueryRowContext(ctx, "SELECT COUNT(t.id) "+baseQ, args...).Scan(&total) + + orderClause := "ORDER BY (CASE WHEN t.status = 'Returned to Security' THEN 0 ELSE 1 END) ASC, t.id DESC" + + query := ` + WITH PaginatedIDs AS ( + SELECT t.id ` + baseQ + ` ` + orderClause + ` LIMIT ? OFFSET ? + ) + SELECT + t.id, t.source, t.asset_identifier, t.title, COALESCE(t.description, ''), COALESCE(t.recommended_remediation, ''), t.severity, t.status, + t.triage_due_date, t.remediation_due_date, COALESCE(t.patch_evidence, ''), + t.assignee as current_assignee, + t.owner_viewed_at, + t.updated_at, + CAST(julianday(COALESCE(t.patched_at, t.updated_at)) - julianday(t.created_at) AS INTEGER) as days_to_resolve, + COALESCE(t.latest_comment, '') as latest_comment + FROM PaginatedIDs p + JOIN tickets t ON t.id = p.id + ` + orderClause + + args = append(args, limit, offset) + + rows, err := s.DB.QueryContext(ctx, query, args...) 
+ if err != nil { + return nil, 0, metrics, err + } + defer rows.Close() + + var tickets []domain2.Ticket + for rows.Next() { + var t domain2.Ticket + var assignee string + + err := rows.Scan( + &t.ID, &t.Source, &t.AssetIdentifier, &t.Title, &t.Description, + &t.RecommendedRemediation, &t.Severity, &t.Status, + &t.TriageDueDate, &t.RemediationDueDate, &t.PatchEvidence, + &assignee, + &t.OwnerViewedAt, + &t.UpdatedAt, + &t.DaysToResolve, + &t.LatestComment, + ) + + if err == nil { + t.Assignee = assignee + t.IsOverdue = !t.RemediationDueDate.IsZero() && t.RemediationDueDate.Before(time.Now()) && t.Status != "Patched" && t.Status != "Risk Accepted" + + if tabStatus == "archives" { + if t.DaysToResolve != nil { + t.SLAString = fmt.Sprintf("%d days", *t.DaysToResolve) + } else { + t.SLAString = "Unknown" + } + } else { + t.SLAString = t.RemediationDueDate.Format("Jan 02, 2006") + } + + tickets = append(tickets, t) + } + } + + return tickets, total, metrics, nil +} + +func (s *SQLiteStore) GetGlobalActivityFeed(ctx context.Context, limit int) ([]domain2.FeedItem, error) { + return []domain2.FeedItem{ + { + Actor: "System", + ActivityType: "Info", + NewValue: "Detailed Immutable Audit Logging is a RiskRancher Pro feature. 
Upgrade to track all ticket lifecycle events.", + TimeAgo: "Just now", + }, + }, nil +} + +func (s *SQLiteStore) GetAnalyticsSummary(ctx context.Context) (map[string]int, error) { + summary := make(map[string]int) + + var total int + err := s.DB.QueryRowContext(ctx, `SELECT COUNT(*) FROM tickets WHERE status != 'Patched' AND status != 'Risk Accepted'`).Scan(&total) + if err != nil { + return nil, err + } + summary["Total_Open"] = total + + sourceRows, err := s.DB.QueryContext(ctx, `SELECT source, COUNT(*) FROM tickets WHERE status != 'Patched' AND status != 'Risk Accepted' GROUP BY source`) + if err == nil { + defer sourceRows.Close() + for sourceRows.Next() { + var source string + var count int + if err := sourceRows.Scan(&source, &count); err == nil { + summary["Source_"+source+"_Open"] = count + } + } + } + + sevRows, err := s.DB.QueryContext(ctx, `SELECT severity, COUNT(*) FROM tickets WHERE status != 'Patched' AND status != 'Risk Accepted' GROUP BY severity`) + if err == nil { + defer sevRows.Close() + for sevRows.Next() { + var sev string + var count int + if err := sevRows.Scan(&sev, &count); err == nil { + summary["Severity_"+sev+"_Open"] = count + } + } + } + + return summary, nil +} + +func (s *SQLiteStore) GetPaginatedActivityFeed(ctx context.Context, filter string, limit, offset int) ([]domain2.FeedItem, int, error) { + return []domain2.FeedItem{}, 0, nil +} diff --git a/pkg/datastore/sqlite_drafts.go b/pkg/datastore/sqlite_drafts.go new file mode 100644 index 0000000..785427e --- /dev/null +++ b/pkg/datastore/sqlite_drafts.go @@ -0,0 +1,109 @@ +package datastore + +import ( + "context" + "fmt" + + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (s *SQLiteStore) SaveDraft(ctx context.Context, d domain2.DraftTicket) error { + query := ` + INSERT INTO draft_tickets (report_id, title, description, severity, asset_identifier, recommended_remediation) + VALUES (?, ?, ?, ?, ?, ?)` + + _, err := s.DB.ExecContext(ctx, query, + d.ReportID, 
d.Title, d.Description, d.Severity, d.AssetIdentifier, d.RecommendedRemediation) + return err +} + +func (s *SQLiteStore) GetDraftsByReport(ctx context.Context, reportID string) ([]domain2.DraftTicket, error) { + + query := `SELECT id, report_id, COALESCE(title, ''), COALESCE(description, ''), COALESCE(severity, 'Medium'), COALESCE(asset_identifier, ''), COALESCE(recommended_remediation, '') + FROM draft_tickets WHERE report_id = ?` + + rows, err := s.DB.QueryContext(ctx, query, reportID) + if err != nil { + return nil, err + } + defer rows.Close() + + var drafts []domain2.DraftTicket + for rows.Next() { + var d domain2.DraftTicket + if err := rows.Scan(&d.ID, &d.ReportID, &d.Title, &d.Description, &d.Severity, &d.AssetIdentifier, &d.RecommendedRemediation); err == nil { + drafts = append(drafts, d) + } + } + + if drafts == nil { + drafts = []domain2.DraftTicket{} + } + return drafts, nil +} + +func (s *SQLiteStore) DeleteDraft(ctx context.Context, draftID string) error { + query := `DELETE FROM draft_tickets WHERE id = ?` + _, err := s.DB.ExecContext(ctx, query, draftID) + return err +} + +func (s *SQLiteStore) UpdateDraft(ctx context.Context, draftID int, payload domain2.Ticket) error { + query := `UPDATE draft_tickets SET title = ?, severity = ?, asset_identifier = ?, description = ?, recommended_remediation = ? 
WHERE id = ?` + + _, err := s.DB.ExecContext( + ctx, + query, + payload.Title, + payload.Severity, + payload.AssetIdentifier, + payload.Description, + payload.RecommendedRemediation, + draftID, + ) + + return err +} + +func (s *SQLiteStore) PromotePentestDrafts(ctx context.Context, reportID string, analystEmail string, tickets []domain2.Ticket) error { + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + for _, t := range tickets { + hash := fmt.Sprintf("manual-pentest-%s-%s", t.AssetIdentifier, t.Title) + + res, err := tx.ExecContext(ctx, ` + INSERT INTO tickets ( + source, asset_identifier, title, description, recommended_remediation, severity, status, dedupe_hash, + triage_due_date, remediation_due_date, created_at, updated_at + ) + VALUES (?, ?, ?, ?, ?, ?, 'Waiting to be Triaged', ?, DATETIME('now', '+3 days'), DATETIME('now', '+14 days'), CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) + `, "Manual Pentest", t.AssetIdentifier, t.Title, t.Description, t.RecommendedRemediation, t.Severity, hash) + if err != nil { + return err + } + + ticketID, err := res.LastInsertId() + if err != nil { + return err + } + + _, err = tx.ExecContext(ctx, ` + INSERT INTO ticket_assignments (ticket_id, assignee, role) + VALUES (?, ?, 'RangeHand') + `, ticketID, analystEmail) + if err != nil { + return err + } + } + + _, err = tx.ExecContext(ctx, "DELETE FROM draft_tickets WHERE report_id = ?", reportID) + if err != nil { + return err + } + + return tx.Commit() +} diff --git a/pkg/datastore/sqlite_ingest.go b/pkg/datastore/sqlite_ingest.go new file mode 100644 index 0000000..d6af8d5 --- /dev/null +++ b/pkg/datastore/sqlite_ingest.go @@ -0,0 +1,284 @@ +package datastore + +import ( + "context" + "database/sql" + "time" + + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (s *SQLiteStore) IngestTickets(ctx context.Context, tickets []domain2.Ticket) error { + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return err + } + 
defer tx.Rollback() + + _, err = tx.ExecContext(ctx, ` + CREATE TEMP TABLE IF NOT EXISTS staging_tickets ( + domain TEXT, source TEXT, asset_identifier TEXT, title TEXT, + description TEXT, recommended_remediation TEXT, severity TEXT, + status TEXT, dedupe_hash TEXT + ) + `) + if err != nil { + return err + } + tx.ExecContext(ctx, `DELETE FROM staging_tickets`) + + stmt, err := tx.PrepareContext(ctx, ` + INSERT INTO staging_tickets (domain, source, asset_identifier, title, description, recommended_remediation, severity, status, dedupe_hash) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + `) + if err != nil { + return err + } + + for _, t := range tickets { + status := t.Status + if status == "" { + status = "Waiting to be Triaged" + } + domain := t.Domain + if domain == "" { + domain = "Vulnerability" + } + source := t.Source + if source == "" { + source = "Manual" + } + + _, err = stmt.ExecContext(ctx, domain, source, t.AssetIdentifier, t.Title, t.Description, t.RecommendedRemediation, t.Severity, status, t.DedupeHash) + if err != nil { + stmt.Close() + return err + } + } + stmt.Close() + + _, err = tx.ExecContext(ctx, ` + INSERT INTO tickets (domain, source, asset_identifier, title, description, recommended_remediation, severity, status, dedupe_hash) + SELECT domain, source, asset_identifier, title, description, recommended_remediation, severity, status, dedupe_hash + FROM staging_tickets + WHERE true -- Prevents SQLite from mistaking 'ON CONFLICT' for a JOIN condition + ON CONFLICT(dedupe_hash) DO UPDATE SET + description = excluded.description, + updated_at = CURRENT_TIMESTAMP + `) + if err != nil { + return err + } + + tx.ExecContext(ctx, `DROP TABLE staging_tickets`) + return tx.Commit() +} + +func (s *SQLiteStore) GetAdapters(ctx context.Context) ([]domain2.Adapter, error) { + rows, err := s.DB.QueryContext(ctx, "SELECT id, name, source_name, findings_path, mapping_title, mapping_asset, mapping_severity, mapping_description, mapping_remediation FROM data_adapters") + 
if err != nil { + return nil, err + } + defer rows.Close() + + var adapters []domain2.Adapter + for rows.Next() { + var a domain2.Adapter + rows.Scan(&a.ID, &a.Name, &a.SourceName, &a.FindingsPath, &a.MappingTitle, &a.MappingAsset, &a.MappingSeverity, &a.MappingDescription, &a.MappingRemediation) + adapters = append(adapters, a) + } + return adapters, nil +} + +func (s *SQLiteStore) SaveAdapter(ctx context.Context, a domain2.Adapter) error { + _, err := s.DB.ExecContext(ctx, ` + INSERT INTO data_adapters (name, source_name, findings_path, mapping_title, mapping_asset, mapping_severity, mapping_description, mapping_remediation) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, + a.Name, a.SourceName, a.FindingsPath, a.MappingTitle, a.MappingAsset, a.MappingSeverity, a.MappingDescription, a.MappingRemediation) + return err +} + +func (s *SQLiteStore) GetAdapterByID(ctx context.Context, id int) (domain2.Adapter, error) { + var a domain2.Adapter + query := ` + SELECT + id, name, source_name, findings_path, + mapping_title, mapping_asset, mapping_severity, + IFNULL(mapping_description, ''), IFNULL(mapping_remediation, ''), + created_at, updated_at + FROM data_adapters + WHERE id = ?` + + err := s.DB.QueryRowContext(ctx, query, id).Scan( + &a.ID, &a.Name, &a.SourceName, &a.FindingsPath, + &a.MappingTitle, &a.MappingAsset, &a.MappingSeverity, + &a.MappingDescription, &a.MappingRemediation, + &a.CreatedAt, &a.UpdatedAt, + ) + return a, err +} + +func (s *SQLiteStore) DeleteAdapter(ctx context.Context, id int) error { + _, err := s.DB.ExecContext(ctx, "DELETE FROM data_adapters WHERE id = ?", id) + return err +} + +func (s *SQLiteStore) GetAdapterByName(ctx context.Context, name string) (domain2.Adapter, error) { + var a domain2.Adapter + query := ` + SELECT + id, name, source_name, findings_path, + mapping_title, mapping_asset, mapping_severity, + IFNULL(mapping_description, ''), IFNULL(mapping_remediation, '') + FROM data_adapters + WHERE name = ?` + + err := s.DB.QueryRowContext(ctx, 
query, name).Scan( + &a.ID, &a.Name, &a.SourceName, &a.FindingsPath, + &a.MappingTitle, &a.MappingAsset, &a.MappingSeverity, + &a.MappingDescription, &a.MappingRemediation, + ) + return a, err +} + +func (s *SQLiteStore) ProcessIngestionBatch(ctx context.Context, source, asset string, incoming []domain2.Ticket) error { + slaMap, _ := s.buildSLAMap(ctx) + + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + for i := range incoming { + if incoming[i].Domain == "" { + incoming[i].Domain = "Vulnerability" + } + if incoming[i].Status == "" { + incoming[i].Status = "Waiting to be Triaged" + } + } + + inserts, reopens, updates, closes, err := s.calculateDiffState(ctx, tx, source, asset, incoming) + if err != nil { + return err + } + + if err := s.executeBatchMutations(ctx, tx, source, asset, slaMap, inserts, reopens, updates, closes); err != nil { + return err + } + + return tx.Commit() +} + +func (s *SQLiteStore) calculateDiffState(ctx context.Context, tx *sql.Tx, source, asset string, incoming []domain2.Ticket) (inserts, reopens, descUpdates []domain2.Ticket, autocloses []string, err error) { + rows, err := tx.QueryContext(ctx, `SELECT dedupe_hash, status, COALESCE(description, '') FROM tickets WHERE source = ? 
AND asset_identifier = ?`, source, asset) + if err != nil { + return nil, nil, nil, nil, err + } + defer rows.Close() + + type existingRecord struct{ status, description string } + existingMap := make(map[string]existingRecord) + for rows.Next() { + var hash, status, desc string + if err := rows.Scan(&hash, &status, &desc); err == nil { + existingMap[hash] = existingRecord{status: status, description: desc} + } + } + + incomingMap := make(map[string]bool) + for _, ticket := range incoming { + incomingMap[ticket.DedupeHash] = true + existing, exists := existingMap[ticket.DedupeHash] + if !exists { + inserts = append(inserts, ticket) + } else { + if existing.status == "Patched" { + reopens = append(reopens, ticket) + } + if ticket.Description != "" && ticket.Description != existing.description && existing.status != "Patched" && existing.status != "Risk Accepted" && existing.status != "False Positive" { + descUpdates = append(descUpdates, ticket) + } + } + } + + for hash, record := range existingMap { + if !incomingMap[hash] && record.status != "Patched" && record.status != "Risk Accepted" && record.status != "False Positive" { + autocloses = append(autocloses, hash) + } + } + return inserts, reopens, descUpdates, autocloses, nil +} + +func (s *SQLiteStore) executeBatchMutations(ctx context.Context, tx *sql.Tx, source, asset string, slaMap map[string]map[string]domain2.SLAPolicy, inserts, reopens, descUpdates []domain2.Ticket, autocloses []string) error { + now := time.Now() + + // A. 
Inserts + if len(inserts) > 0 { + insertStmt, err := tx.PrepareContext(ctx, `INSERT INTO tickets (source, asset_identifier, title, severity, description, status, dedupe_hash, domain, triage_due_date, remediation_due_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`) + if err != nil { + return err + } + defer insertStmt.Close() + + for _, t := range inserts { + daysToTriage, daysToRemediate := 3, 30 + if dMap, ok := slaMap[t.Domain]; ok { + if policy, ok := dMap[t.Severity]; ok { + daysToTriage, daysToRemediate = policy.DaysToTriage, policy.DaysToRemediate + } + } + _, err := insertStmt.ExecContext(ctx, source, asset, t.Title, t.Severity, t.Description, t.Status, t.DedupeHash, t.Domain, now.AddDate(0, 0, daysToTriage), now.AddDate(0, 0, daysToRemediate)) + if err != nil { + return err + } + } + } + + if len(reopens) > 0 { + updateStmt, _ := tx.PrepareContext(ctx, `UPDATE tickets SET status = 'Waiting to be Triaged', patched_at = NULL, triage_due_date = ?, remediation_due_date = ? WHERE dedupe_hash = ?`) + defer updateStmt.Close() + for _, t := range reopens { + updateStmt.ExecContext(ctx, now.AddDate(0, 0, 3), now.AddDate(0, 0, 30), t.DedupeHash) // Using default SLAs for fallback + } + } + + if len(descUpdates) > 0 { + descStmt, _ := tx.PrepareContext(ctx, `UPDATE tickets SET description = ? 
WHERE dedupe_hash = ?`) + defer descStmt.Close() + for _, t := range descUpdates { + descStmt.ExecContext(ctx, t.Description, t.DedupeHash) + } + } + + if len(autocloses) > 0 { + closeStmt, _ := tx.PrepareContext(ctx, `UPDATE tickets SET status = 'Patched', patched_at = CURRENT_TIMESTAMP WHERE dedupe_hash = ?`) + defer closeStmt.Close() + for _, hash := range autocloses { + closeStmt.ExecContext(ctx, hash) + } + } + + return nil +} + +func (s *SQLiteStore) LogSync(ctx context.Context, source, status string, records int, errMsg string) error { + _, err := s.DB.ExecContext(ctx, `INSERT INTO sync_logs (source, status, records_processed, error_message) VALUES (?, ?, ?, ?)`, source, status, records, errMsg) + return err +} + +func (s *SQLiteStore) GetRecentSyncLogs(ctx context.Context, limit int) ([]domain2.SyncLog, error) { + rows, err := s.DB.QueryContext(ctx, `SELECT id, source, status, records_processed, IFNULL(error_message, ''), created_at FROM sync_logs ORDER BY id DESC LIMIT ?`, limit) + if err != nil { + return nil, err + } + defer rows.Close() + var logs []domain2.SyncLog + for rows.Next() { + var l domain2.SyncLog + rows.Scan(&l.ID, &l.Source, &l.Status, &l.RecordsProcessed, &l.ErrorMessage, &l.CreatedAt) + logs = append(logs, l) + } + return logs, nil +} diff --git a/pkg/datastore/sqlite_tickets.go b/pkg/datastore/sqlite_tickets.go new file mode 100644 index 0000000..7d243a6 --- /dev/null +++ b/pkg/datastore/sqlite_tickets.go @@ -0,0 +1,131 @@ +package datastore + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (s *SQLiteStore) GetTickets(ctx context.Context) ([]domain.Ticket, error) { + rows, err := s.DB.QueryContext(ctx, "SELECT id, title, severity, status FROM tickets LIMIT 100") + if err != nil { + return nil, err + } + defer rows.Close() + + var tickets []domain.Ticket + for rows.Next() { + var t domain.Ticket + rows.Scan(&t.ID, &t.Title, &t.Severity, &t.Status) + 
tickets = append(tickets, t)
	}
	return tickets, nil
}

// CreateTicket inserts a manually created ticket, filling in workflow
// defaults (status, domain, source, asset identifier) and a SHA-256 dedupe
// hash derived from source/asset/title/severity so repeated submissions
// collapse onto the same identity. SLA due dates are fixed in SQL at
// +3 days (triage) and +14 days (remediation). On success t.ID is populated
// with the new row id.
//
// FIX: the original discarded the LastInsertId error and would silently
// leave t.ID == 0; the error is now propagated.
func (s *SQLiteStore) CreateTicket(ctx context.Context, t *domain.Ticket) error {
	if t.Status == "" {
		t.Status = "Waiting to be Triaged"
	}
	if t.Domain == "" {
		t.Domain = "Vulnerability"
	}
	if t.Source == "" {
		t.Source = "Manual"
	}
	if t.AssetIdentifier == "" {
		t.AssetIdentifier = "Default"
	}

	// Deterministic identity: same source/asset/title/severity -> same hash.
	rawHash := fmt.Sprintf("%s-%s-%s-%s", t.Source, t.AssetIdentifier, t.Title, t.Severity)
	hashBytes := sha256.Sum256([]byte(rawHash))
	t.DedupeHash = hex.EncodeToString(hashBytes[:])

	query := `
	INSERT INTO tickets (
		domain, source, asset_identifier, title, description, recommended_remediation,
		severity, status, dedupe_hash,
		triage_due_date, remediation_due_date, created_at, updated_at
	) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, DATETIME('now', '+3 days'), DATETIME('now', '+14 days'), CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
	`

	res, err := s.DB.ExecContext(ctx, query,
		t.Domain, t.Source, t.AssetIdentifier, t.Title, t.Description, t.RecommendedRemediation,
		t.Severity, t.Status, t.DedupeHash,
	)
	if err != nil {
		return err
	}

	id, err := res.LastInsertId()
	if err != nil {
		return err
	}
	t.ID = int(id)
	return nil
}

// UpdateTicketInline handles a single UI edit and updates the flattened
// comment tracking. An empty comment leaves latest_comment untouched (via
// the SQL CASE); a non-empty comment is stored prefixed with the acting
// user as "[actor] text".
func (s *SQLiteStore) UpdateTicketInline(ctx context.Context, ticketID int, severity, description, remediation, comment, actor, status, assignee string) error {
	formattedComment := ""
	if comment != "" {
		formattedComment = "[" + actor + "] " + comment
	}

	query := `
	UPDATE tickets
	SET severity = ?, description = ?, recommended_remediation = ?,
	    status = ?, assignee = ?,
	    latest_comment = CASE WHEN ? != '' THEN ? ELSE latest_comment END,
	    updated_at = CURRENT_TIMESTAMP
	WHERE id = ?`

	// formattedComment is bound twice: once for the CASE guard, once as the value.
	_, err := s.DB.ExecContext(ctx, query, severity, description, remediation, status, assignee, formattedComment, formattedComment, ticketID)
	return err
}

// RejectTicketFromWrangler puts a ticket back into the Holding Pen
// ("Returned to Security", unassigned) with an explanatory comment. All ids
// are updated in one transaction: either every rejection lands or none do.
func (s *SQLiteStore) RejectTicketFromWrangler(ctx context.Context, ticketIDs []int, reason, comment string) error {
	tx, err := s.DB.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback()

	// The comment is identical for every ticket; build it once (the original
	// rebuilt this loop-invariant string on every iteration).
	fullComment := "[Wrangler Reject: " + reason + "] " + comment
	for _, id := range ticketIDs {
		_, err := tx.ExecContext(ctx, "UPDATE tickets SET status = 'Returned to Security', assignee = 'Unassigned', latest_comment = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?", fullComment, id)
		if err != nil {
			return err
		}
	}
	return tx.Commit()
}

// GetTicketByID loads one ticket with all workflow fields. Timestamp columns
// are read as strings and parsed as RFC3339; unparseable values fall back to
// the zero time (errors deliberately ignored, matching the original contract).
func (s *SQLiteStore) GetTicketByID(ctx context.Context, id int) (domain.Ticket, error) {
	var t domain.Ticket
	var triageDue, remDue, created, updated string
	var patchedAt *string

	query := `SELECT id, domain, source, asset_identifier, title, description, recommended_remediation, severity, status, dedupe_hash, triage_due_date, remediation_due_date, created_at, updated_at, patched_at, assignee, latest_comment FROM tickets WHERE id = ?`

	err := s.DB.QueryRowContext(ctx, query, id).Scan(
		&t.ID, &t.Domain, &t.Source, &t.AssetIdentifier, &t.Title, &t.Description, &t.RecommendedRemediation, &t.Severity, &t.Status, &t.DedupeHash, &triageDue, &remDue, &created, &updated, &patchedAt, &t.Assignee, &t.LatestComment,
	)
	if err != nil {
		return t, err
	}

	t.TriageDueDate, _ = time.Parse(time.RFC3339, triageDue)
	t.RemediationDueDate, _ = time.Parse(time.RFC3339, remDue)
	t.CreatedAt, _ = time.Parse(time.RFC3339, created)
	t.UpdatedAt, _ = time.Parse(time.RFC3339, updated)

	if patchedAt != nil {
+ pTime, _ := time.Parse(time.RFC3339, *patchedAt) + t.PatchedAt = &pTime + } + + return t, nil +} diff --git a/pkg/domain/adapter.go b/pkg/domain/adapter.go new file mode 100644 index 0000000..700cdae --- /dev/null +++ b/pkg/domain/adapter.go @@ -0,0 +1,16 @@ +package domain + +// Adapter represents a saved mapping profile for a specific scanner +type Adapter struct { + ID int `json:"id"` + Name string `json:"name"` + SourceName string `json:"source_name"` + FindingsPath string `json:"findings_path"` + MappingTitle string `json:"mapping_title"` + MappingAsset string `json:"mapping_asset"` + MappingSeverity string `json:"mapping_severity"` + MappingDescription string `json:"mapping_description"` + MappingRemediation string `json:"mapping_remediation"` + CreatedAt string `json:"created_at"` + UpdatedAt string `json:"updated_at"` +} diff --git a/pkg/domain/analytics.go b/pkg/domain/analytics.go new file mode 100644 index 0000000..099c43b --- /dev/null +++ b/pkg/domain/analytics.go @@ -0,0 +1,74 @@ +package domain + +type ResolutionMetrics struct { + Total int + Patched int + RiskAccepted int + FalsePositive int + PatchedPct int + RiskAccPct int + FalsePosPct int +} + +type SheriffAnalytics struct { + ActiveKEVs int + GlobalMTTRDays int + OpenCriticals int + TotalOverdue int + SourceHealth []SourceMetrics + Resolution ResolutionMetrics + Severity SeverityMetrics + TopAssets []AssetMetric +} + +type SourceMetrics struct { + Source string + TotalOpen int + Criticals int + CisaKEVs int + Untriaged int + PatchOverdue int + PendingRisk int + TotalClosed int + Patched int + RiskAccepted int + FalsePositive int + TopAssignee string + StrategicNote string +} + +type FeedItem struct { + Actor string + ActivityType string + NewValue string + TimeAgo string +} + +type SeverityMetrics struct { + Critical int + High int + Medium int + Low int + Info int + Total int + CritPct int + HighPct int + MedPct int + LowPct int + InfoPct int +} + +type AssetMetric struct { + Asset string + 
Count int + Percentage int +} + +type SyncLog struct { + ID int `json:"id"` + Source string `json:"source"` + Status string `json:"status"` + RecordsProcessed int `json:"records_processed"` + ErrorMessage string `json:"error_message"` + CreatedAt string `json:"created_at"` +} diff --git a/pkg/domain/auth.go b/pkg/domain/auth.go new file mode 100644 index 0000000..e8d9072 --- /dev/null +++ b/pkg/domain/auth.go @@ -0,0 +1,18 @@ +package domain + +import "time" + +type User struct { + ID int `json:"id"` + Email string `json:"email"` + FullName string `json:"full_name"` + PasswordHash string `json:"-"` + GlobalRole string `json:"global_role"` + IsActive bool `json:"is_active"` + CreatedAt time.Time `json:"created_at"` +} +type Session struct { + Token string `json:"token"` + UserID int `json:"user_id"` + ExpiresAt time.Time `json:"expires_at"` +} diff --git a/pkg/domain/config.go b/pkg/domain/config.go new file mode 100644 index 0000000..77e66c7 --- /dev/null +++ b/pkg/domain/config.go @@ -0,0 +1,15 @@ +package domain + +type AppConfig struct { + Timezone string `json:"timezone"` + BusinessStart int `json:"business_start"` + BusinessEnd int `json:"business_end"` + DefaultExtensionDays int `json:"default_extension_days"` + Backup BackupPolicy `json:"backup"` +} + +type BackupPolicy struct { + Enabled bool `json:"enabled"` + IntervalHours int `json:"interval_hours"` + RetentionDays int `json:"retention_days"` +} diff --git a/pkg/domain/connector.go b/pkg/domain/connector.go new file mode 100644 index 0000000..9dbbcc0 --- /dev/null +++ b/pkg/domain/connector.go @@ -0,0 +1,16 @@ +package domain + +// ConnectorTemplate defines how to translate third-party JSON into ticket format +type ConnectorTemplate struct { + ID string `json:"id"` + Name string `json:"name"` + SourceDefault string `json:"source_default"` + FindingsArrayPath string `json:"findings_array_path"` + FieldMappings struct { + Title string `json:"title"` + AssetIdentifier string `json:"asset_identifier"` + 
Severity string `json:"severity"` + Description string `json:"description"` + RecommendedRemediation string `json:"recommended_remediation"` + } `json:"field_mappings"` +} diff --git a/pkg/domain/drafts.go b/pkg/domain/drafts.go new file mode 100644 index 0000000..108a3d0 --- /dev/null +++ b/pkg/domain/drafts.go @@ -0,0 +1,11 @@ +package domain + +type DraftTicket struct { + ID int `json:"id"` + ReportID string `json:"report_id"` + Title string `json:"title"` + Description string `json:"description"` + Severity string `json:"severity"` + AssetIdentifier string `json:"asset_identifier"` + RecommendedRemediation string `json:"recommended_remediation"` +} diff --git a/pkg/domain/export.go b/pkg/domain/export.go new file mode 100644 index 0000000..3888691 --- /dev/null +++ b/pkg/domain/export.go @@ -0,0 +1,11 @@ +package domain + +type ExportState struct { + AppConfig AppConfig `json:"app_config"` + SLAPolicies []SLAPolicy `json:"sla_policies"` + Users []User `json:"users"` + Adapters []Adapter `json:"adapters"` + Tickets []Ticket `json:"tickets"` + Version string `json:"export_version"` + ExportedAt string `json:"exported_at"` +} diff --git a/pkg/domain/store.go b/pkg/domain/store.go new file mode 100644 index 0000000..4a3af16 --- /dev/null +++ b/pkg/domain/store.go @@ -0,0 +1,95 @@ +package domain + +import ( + "context" + "net/http" + "time" +) + +// Store embeds all sub interfaces for Core +type Store interface { + TicketStore + IdentityStore + IngestStore + ConfigStore + AnalyticsStore + DraftStore +} + +// TicketStore: Core CRUD and Workflow +type TicketStore interface { + GetTickets(ctx context.Context) ([]Ticket, error) + GetDashboardTickets(ctx context.Context, tabStatus, filter, assetFilter, userEmail, userRole string, limit, offset int) ([]Ticket, int, map[string]int, error) + CreateTicket(ctx context.Context, t *Ticket) error + GetTicketByID(ctx context.Context, id int) (Ticket, error) + UpdateTicketInline(ctx context.Context, ticketID int, severity, 
description, remediation, comment, actor, status, assignee string) error +} + +// IdentityStore: Users, Sessions, and Dispatching +type IdentityStore interface { + CreateUser(ctx context.Context, email, fullName, passwordHash, globalRole string) (*User, error) + GetUserByEmail(ctx context.Context, email string) (*User, error) + GetUserByID(ctx context.Context, id int) (*User, error) + GetAllUsers(ctx context.Context) ([]*User, error) + GetUserCount(ctx context.Context) (int, error) + UpdateUserPassword(ctx context.Context, id int, newPasswordHash string) error + UpdateUserRole(ctx context.Context, id int, newRole string) error + DeactivateUserAndReassign(ctx context.Context, userID int) error + + CreateSession(ctx context.Context, token string, userID int, expiresAt time.Time) error + GetSession(ctx context.Context, token string) (*Session, error) + DeleteSession(ctx context.Context, token string) error + + GetWranglers(ctx context.Context) ([]User, error) +} + +// IngestStore: Scanners, Adapters, and Sync History +type IngestStore interface { + IngestTickets(ctx context.Context, tickets []Ticket) error + ProcessIngestionBatch(ctx context.Context, source string, assetIdentifier string, incoming []Ticket) error + + GetAdapters(ctx context.Context) ([]Adapter, error) + GetAdapterByID(ctx context.Context, id int) (Adapter, error) + GetAdapterByName(ctx context.Context, name string) (Adapter, error) + SaveAdapter(ctx context.Context, adapter Adapter) error + DeleteAdapter(ctx context.Context, id int) error + + LogSync(ctx context.Context, source, status string, records int, errMsg string) error + GetRecentSyncLogs(ctx context.Context, limit int) ([]SyncLog, error) +} + +// ConfigStore: Global System Settings +type ConfigStore interface { + GetAppConfig(ctx context.Context) (AppConfig, error) + UpdateAppConfig(ctx context.Context, config AppConfig) error + GetSLAPolicies(ctx context.Context) ([]SLAPolicy, error) + UpdateSLAPolicies(ctx context.Context, slas []SLAPolicy) 
error + UpdateBackupPolicy(ctx context.Context, policy BackupPolicy) error + ExportSystemState(ctx context.Context) (ExportState, error) +} + +// AnalyticsStore: Audit Logs and KPI Metrics +type AnalyticsStore interface { + GetSheriffAnalytics(ctx context.Context) (SheriffAnalytics, error) + GetAnalyticsSummary(ctx context.Context) (map[string]int, error) + GetGlobalActivityFeed(ctx context.Context, limit int) ([]FeedItem, error) + GetPaginatedActivityFeed(ctx context.Context, filter string, limit int, offset int) ([]FeedItem, int, error) +} + +// DraftStore: The Pentest Desk OSS, word docx +type DraftStore interface { + SaveDraft(ctx context.Context, draft DraftTicket) error + GetDraftsByReport(ctx context.Context, reportID string) ([]DraftTicket, error) + DeleteDraft(ctx context.Context, draftID string) error + UpdateDraft(ctx context.Context, draftID int, payload Ticket) error + PromotePentestDrafts(ctx context.Context, reportID string, analystEmail string, tickets []Ticket) error +} + +type Authenticator interface { + Middleware(next http.Handler) http.Handler +} + +type SLACalculator interface { + CalculateDueDate(severity string) *time.Time + CalculateTrueSLAHours(ctx context.Context, ticketID int, store Store) (float64, error) +} diff --git a/pkg/domain/ticket.go b/pkg/domain/ticket.go new file mode 100644 index 0000000..2b4d85b --- /dev/null +++ b/pkg/domain/ticket.go @@ -0,0 +1,61 @@ +package domain + +import ( + "time" +) + +// SLAPolicy represents the global SLA configuration per severity +type SLAPolicy struct { + Domain string `json:"domain"` + Severity string `json:"severity"` + DaysToRemediate int `json:"days_to_remediate"` + MaxExtensions int `json:"max_extensions"` + DaysToTriage int `json:"days_to_triage"` +} + +// AssetRiskSummary holds the rolled-up vulnerability counts for a single asset +type AssetRiskSummary struct { + AssetIdentifier string + TotalActive int + Critical int + High int + Medium int + Low int + Info int +} + +type Ticket struct 
{ + ID int `json:"id"` + Domain string `json:"domain"` + IsOverdue bool `json:"is_overdue"` + DaysToResolve *int `json:"days_to_resolve"` + Source string `json:"source"` + AssetIdentifier string `json:"asset_identifier"` + Title string `json:"title"` + Description string `json:"description"` + RecommendedRemediation string `json:"recommended_remediation"` + Severity string `json:"severity"` + Status string `json:"status"` + + DedupeHash string `json:"dedupe_hash"` + + PatchEvidence *string `json:"patch_evidence"` + OwnerViewedAt *time.Time `json:"owner_viewed_at"` + + TriageDueDate time.Time `json:"triage_due_date"` + RemediationDueDate time.Time `json:"remediation_due_date"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + PatchedAt *time.Time `json:"patched_at"` + + SLAString string `json:"sla_string"` + Assignee string `json:"assignee"` + LatestComment string `json:"latest_comment"` +} + +// TicketAssignment represents the many-to-many relationship +type TicketAssignment struct { + TicketID int `json:"ticket_id"` + Assignee string `json:"assignee"` + Role string `json:"role"` +} diff --git a/pkg/ingest/handler.go b/pkg/ingest/handler.go new file mode 100644 index 0000000..edcb33a --- /dev/null +++ b/pkg/ingest/handler.go @@ -0,0 +1,13 @@ +package ingest + +import ( + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +type Handler struct { + Store domain.Store +} + +func NewHandler(store domain.Store) *Handler { + return &Handler{Store: store} +} diff --git a/pkg/ingest/ingest.go b/pkg/ingest/ingest.go new file mode 100644 index 0000000..3e7f678 --- /dev/null +++ b/pkg/ingest/ingest.go @@ -0,0 +1,163 @@ +package ingest + +import ( + "crypto/sha256" + "encoding/csv" + "encoding/hex" + "encoding/json" + "log" + "net/http" + "strconv" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (h *Handler) HandleIngest(w http.ResponseWriter, r *http.Request) { + decoder := json.NewDecoder(r.Body) + _, err := 
decoder.Token() + if err != nil { + http.Error(w, "Invalid JSON payload: expected array", http.StatusBadRequest) + return + } + + type groupKey struct { + Source string + Asset string + } + groupedTickets := make(map[groupKey][]domain.Ticket) + for decoder.More() { + var ticket domain.Ticket + if err := decoder.Decode(&ticket); err != nil { + http.Error(w, "Error parsing ticket object", http.StatusBadRequest) + return + } + + if ticket.Status == "" { + ticket.Status = "Waiting to be Triaged" + } + + if ticket.DedupeHash == "" { + hashInput := ticket.Source + "|" + ticket.AssetIdentifier + "|" + ticket.Title + hash := sha256.Sum256([]byte(hashInput)) + ticket.DedupeHash = hex.EncodeToString(hash[:]) + } + + key := groupKey{ + Source: ticket.Source, + Asset: ticket.AssetIdentifier, + } + groupedTickets[key] = append(groupedTickets[key], ticket) + } + + _, err = decoder.Token() + if err != nil { + http.Error(w, "Invalid JSON payload termination", http.StatusBadRequest) + return + } + + for key, batch := range groupedTickets { + err := h.Store.ProcessIngestionBatch(r.Context(), key.Source, key.Asset, batch) + if err != nil { + log.Printf("๐Ÿ”ฅ Ingestion DB Error for Asset %s: %v", key.Asset, err) + h.Store.LogSync(r.Context(), key.Source, "Failed", len(batch), err.Error()) + http.Error(w, "Database error processing batch", http.StatusInternalServerError) + return + } else { + h.Store.LogSync(r.Context(), key.Source, "Success", len(batch), "") + } + } + + w.WriteHeader(http.StatusCreated) +} + +func (h *Handler) HandleCSVIngest(w http.ResponseWriter, r *http.Request) { + if err := r.ParseMultipartForm(10 << 20); err != nil { + http.Error(w, "Failed to parse form", http.StatusBadRequest) + return + } + + adapterIDStr := r.FormValue("adapter_id") + adapterID, err := strconv.Atoi(adapterIDStr) + if err != nil { + http.Error(w, "Invalid adapter_id", http.StatusBadRequest) + return + } + + adapter, err := h.Store.GetAdapterByID(r.Context(), adapterID) + if err != nil { + 
http.Error(w, "Adapter mapping not found", http.StatusNotFound) + return + } + + file, _, err := r.FormFile("file") + if err != nil { + http.Error(w, "Failed to read file payload", http.StatusBadRequest) + return + } + defer file.Close() + + reader := csv.NewReader(file) + records, err := reader.ReadAll() + if err != nil || len(records) < 2 { + http.Error(w, "Invalid or empty CSV format", http.StatusBadRequest) + return + } + + headers := records[0] + headerMap := make(map[string]int) + for i, h := range headers { + headerMap[h] = i + } + + type groupKey struct { + Source string + Asset string + } + groupedTickets := make(map[groupKey][]domain.Ticket) + + for _, row := range records[1:] { + ticket := domain.Ticket{ + Source: adapter.SourceName, + Status: "Waiting to be Triaged", + } + + if idx, ok := headerMap[adapter.MappingTitle]; ok && idx < len(row) { + ticket.Title = row[idx] + } + if idx, ok := headerMap[adapter.MappingAsset]; ok && idx < len(row) { + ticket.AssetIdentifier = row[idx] + } + if idx, ok := headerMap[adapter.MappingSeverity]; ok && idx < len(row) { + ticket.Severity = row[idx] + } + if idx, ok := headerMap[adapter.MappingDescription]; ok && idx < len(row) { + ticket.Description = row[idx] + } + if adapter.MappingRemediation != "" { + if idx, ok := headerMap[adapter.MappingRemediation]; ok && idx < len(row) { + ticket.RecommendedRemediation = row[idx] + } + } + + if ticket.Title != "" && ticket.AssetIdentifier != "" { + hashInput := ticket.Source + "|" + ticket.AssetIdentifier + "|" + ticket.Title + hash := sha256.Sum256([]byte(hashInput)) + ticket.DedupeHash = hex.EncodeToString(hash[:]) + key := groupKey{Source: ticket.Source, Asset: ticket.AssetIdentifier} + groupedTickets[key] = append(groupedTickets[key], ticket) + } + } + + for key, batch := range groupedTickets { + err := h.Store.ProcessIngestionBatch(r.Context(), key.Source, key.Asset, batch) + if err != nil { + log.Printf("๐Ÿ”ฅ CSV Ingestion Error for Asset %s: %v", key.Asset, err) + 
h.Store.LogSync(r.Context(), key.Source, "Failed", len(batch), err.Error()) + http.Error(w, "Database error processing CSV batch", http.StatusInternalServerError) + return + } else { + h.Store.LogSync(r.Context(), key.Source, "Success", len(batch), "") + } + } + w.WriteHeader(http.StatusCreated) +} diff --git a/pkg/ingest/ingest_test.go b/pkg/ingest/ingest_test.go new file mode 100644 index 0000000..ee04a37 --- /dev/null +++ b/pkg/ingest/ingest_test.go @@ -0,0 +1,488 @@ +package ingest + +import ( + "bytes" + "context" + "database/sql" + "encoding/json" + "fmt" + "mime/multipart" + "net/http" + "net/http/httptest" + "runtime/debug" + "testing" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func setupTestIngest(t *testing.T) (*Handler, *sql.DB) { + db := datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +func GetVIPCookie(store domain.Store) *http.Cookie { + + user, err := store.GetUserByEmail(context.Background(), "vip@RiskRancher.com") + if err != nil { + user, _ = store.CreateUser(context.Background(), "vip@RiskRancher.com", "Test VIP", "hash", "Sheriff") + } + + store.CreateSession(context.Background(), "vip_token_999", user.ID, time.Now().Add(1*time.Hour)) + return &http.Cookie{Name: "session_token", Value: "vip_token_999"} +} + +func TestAutoPatchMissingFindings(t *testing.T) { + app, db := setupTestIngest(t) + defer db.Close() + + payload1 := []byte(`[ + {"title": "Vuln A", "severity": "High"}, + {"title": "Vuln B", "severity": "Medium"} + ] + `) + req1 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(payload1)) + req1.AddCookie(GetVIPCookie(app.Store)) + rr1 := httptest.NewRecorder() + app.HandleIngest(rr1, req1) + + var count int + db.QueryRow("SELECT COUNT(*) FROM tickets WHERE status = 'Waiting to be Triaged'").Scan(&count) + if count != 2 { + t.Fatalf("Expected 2 unpatched tickets, got %d", count) + } + + 
payload2 := []byte(` [ + {"title": "Vuln A", "severity": "High"} + ]`) + req2 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(payload2)) + req2.AddCookie(GetVIPCookie(app.Store)) + rr2 := httptest.NewRecorder() + app.HandleIngest(rr2, req2) + + var statusB string + var patchedAt sql.NullTime + + err := db.QueryRow("SELECT status, patched_at FROM tickets WHERE title = 'Vuln B'").Scan(&statusB, &patchedAt) + if err != nil { + t.Fatalf("Failed to query Vuln B: %v", err) + } + + if statusB != "Patched" { + t.Errorf("Expected Vuln B status to be 'Patched', got '%s'", statusB) + } + + if !patchedAt.Valid { + t.Errorf("Expected Vuln B to have a patched_at timestamp, but it was NULL") + } +} + +func TestHandleIngest(t *testing.T) { + a, db := setupTestIngest(t) + defer db.Close() + + sendIngestRequest := func(findings []domain.Ticket) *httptest.ResponseRecorder { + body, _ := json.Marshal(findings) + req := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + a.HandleIngest(rr, req) + return rr + } + + t.Run("1. Fresh Ingestion", func(t *testing.T) { + findings := []domain.Ticket{ + { + Source: "CrowdStrike", + AssetIdentifier: "Server-01", + Title: "Malware Detected", + Severity: "Critical", + }, + } + + rr := sendIngestRequest(findings) + if rr.Code != http.StatusCreated { + t.Fatalf("expected 201 Created, got %d", rr.Code) + } + + var count int + db.QueryRow("SELECT COUNT(*) FROM tickets").Scan(&count) + if count != 1 { + t.Errorf("expected 1 ticket in DB, got %d", count) + } + }) + + t.Run("2. 
Deduplication", func(t *testing.T) { + time.Sleep(1 * time.Second) + + findings := []domain.Ticket{ + { + Source: "CrowdStrike", + AssetIdentifier: "Server-01", + Title: "Malware Detected", + Severity: "Critical", + Description: "Updated Description", + }, + } + + rr := sendIngestRequest(findings) + if rr.Code != http.StatusCreated { + t.Fatalf("expected 201 Created, got %d", rr.Code) + } + + var count int + db.QueryRow("SELECT COUNT(*) FROM tickets").Scan(&count) + if count != 1 { + t.Errorf("expected still 1 ticket in DB due to dedupe, got %d", count) + } + + var desc string + db.QueryRow("SELECT description FROM tickets WHERE title = 'Malware Detected'").Scan(&desc) + if desc != "Updated Description" { + t.Errorf("expected description to update to 'Updated Description', got '%s'", desc) + } + }) + + t.Run("3. Scoped Auto-Patching", func(t *testing.T) { + findings := []domain.Ticket{ + { + Source: "CrowdStrike", + AssetIdentifier: "Server-01", + Title: "Outdated Antivirus", + Severity: "High", + }, + } + + rr := sendIngestRequest(findings) + if rr.Code != http.StatusCreated { + t.Fatalf("expected 201 Created, got %d", rr.Code) + } + + var totalCount int + db.QueryRow("SELECT COUNT(*) FROM tickets").Scan(&totalCount) + if totalCount != 2 { + t.Errorf("expected 2 total tickets in DB, got %d", totalCount) + } + + var status string + db.QueryRow("SELECT status FROM tickets WHERE title = 'Malware Detected'").Scan(&status) + if status != "Patched" { + t.Errorf("expected missing vulnerability to be auto-patched, but status is '%s'", status) + } + }) +} + +func TestCSVIngestion(t *testing.T) { + app, db := setupTestIngest(t) + defer db.Close() + + _, err := db.Exec(` + INSERT INTO data_adapters ( + id, name, source_name, findings_path, + mapping_title, mapping_asset, mapping_severity, mapping_description, mapping_remediation + ) VALUES ( + 999, 'Legacy Scanner V1', 'LegacyScan', '.', + 'Vuln_Name', 'Server_IP', 'Risk_Level', 'Details', 'Fix_Steps' + ) + `) + if err != 
nil { + t.Fatalf("Failed to setup test adapter: %v", err) + } + + rawCSV := `Vuln_Name,Server_IP,Risk_Level,Details,Junk_Column +SQL Injection,192.168.1.50,Critical,Found in login form,ignore_this +Outdated Apache,192.168.1.50,High,Upgrade to 2.4.50,ignore_this` + + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + part, _ := writer.CreateFormFile("file", "scan_results.csv") + part.Write([]byte(rawCSV)) + + writer.WriteField("adapter_id", "999") + writer.Close() + + req := httptest.NewRequest(http.MethodPost, "/api/ingest/csv", body) + req.Header.Set("Content-Type", writer.FormDataContentType()) + rr := httptest.NewRecorder() + + app.HandleCSVIngest(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d. Body: %s", rr.Code, rr.Body.String()) + } + + var count int + db.QueryRow("SELECT COUNT(*) FROM tickets WHERE source = 'LegacyScan'").Scan(&count) + + if count != 2 { + t.Errorf("Expected 2 tickets ingested from CSV, got %d", count) + } + + var title, severity string + db.QueryRow("SELECT title, severity FROM tickets WHERE title = 'SQL Injection'").Scan(&title, &severity) + if severity != "Critical" { + t.Errorf("CSV Mapping failed! 
Expected severity 'Critical', got '%s'", severity) + } +} + +func TestAutoPatchEdgeCases(t *testing.T) { + h, db := setupTestIngest(t) // Swapped 'app' for 'h' + defer db.Close() + + db.Exec(` + INSERT INTO tickets (source, title, severity, dedupe_hash, status) + VALUES ('App B', 'App B Vuln', 'High', 'hash-app-b', 'Waiting to be Triaged') + `) + + payload1 := []byte(`[ + {"source": "App A", "title": "Vuln 1", "severity": "High"}, + {"source": "App A", "title": "Vuln 2", "severity": "Medium"} + ]`) + req1 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(payload1)) + req1.AddCookie(GetVIPCookie(h.Store)) + req1.Header.Set("Content-Type", "application/json") + + rr1 := httptest.NewRecorder() + h.HandleIngest(rr1, req1) + + payload2 := []byte(`[ + {"source": "App A", "title": "Vuln 1", "severity": "High"} + ]`) + req2 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(payload2)) + req2.AddCookie(GetVIPCookie(h.Store)) + req2.Header.Set("Content-Type", "application/json") + + rr2 := httptest.NewRecorder() + h.HandleIngest(rr2, req2) + + var status2 string + db.QueryRow("SELECT status FROM tickets WHERE title = 'Vuln 2'").Scan(&status2) + if status2 != "Patched" { + t.Errorf("Expected Vuln 2 to be 'Patched', got '%s'", status2) + } + + var statusB string + db.QueryRow("SELECT status FROM tickets WHERE title = 'App B Vuln'").Scan(&statusB) + if statusB != "Waiting to be Triaged" { + t.Errorf("CRITICAL FAILURE: Blast radius exceeded! App B status changed to '%s'", statusB) + } +} + +func TestHandleIngest_MultiAssetDiffing(t *testing.T) { + // THE GO 1.26 GC TWEAK: Force Go to keep RAM usage under 2GB + // This makes the GC run aggressively, trading a tiny bit of CPU for massive RAM savings. 
+ previousLimit := debug.SetMemoryLimit(2 * 1024 * 1024 * 1024) + defer debug.SetMemoryLimit(previousLimit) + + a, db := setupTestIngest(t) + db.Exec(`PRAGMA synchronous = OFF;`) + defer db.Close() + + _, err := db.Exec(`INSERT INTO tickets (source, asset_identifier, title, status, severity, dedupe_hash) VALUES + ('Trivy', 'Server-A', 'Old Vuln A', 'Waiting to be Triaged', 'High', 'hash_A_1'), + ('Trivy', 'Server-B', 'Old Vuln B', 'Waiting to be Triaged', 'Critical', 'hash_B_1')`) + if err != nil { + t.Fatalf("Failed to seed database: %v", err) + } + + incomingPayload := []domain.Ticket{ + { + Source: "Trivy", + AssetIdentifier: "Server-A", + Title: "New Vuln A", + Severity: "High", + DedupeHash: "hash_A_2", + }, + } + + body, _ := json.Marshal(incomingPayload) + req := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(body)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + a.HandleIngest(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("Expected 201 Created, got %d", rr.Code) + } + + var statusA string + db.QueryRow(`SELECT status FROM tickets WHERE dedupe_hash = 'hash_A_1'`).Scan(&statusA) + if statusA != "Patched" { + t.Errorf("Expected Server-A's old ticket to be Auto-Patched, got '%s'", statusA) + } + + var statusB string + db.QueryRow(`SELECT status FROM tickets WHERE dedupe_hash = 'hash_B_1'`).Scan(&statusB) + if statusB != "Waiting to be Triaged" { + t.Errorf("CRITICAL BUG: Server-B's ticket was altered! 
Expected 'Waiting to be Triaged', got '%s'", statusB) + } +} + +func TestHandleIngest_OneMillionTicketStressTest(t *testing.T) { + if testing.Short() { + t.Skip("Skipping 1-million ticket stress test in short mode") + } + + a, db := setupTestIngest(t) + defer db.Close() + + numAssets := 10000 + vulnsPerAsset := 100 + + t.Logf("Generating baseline payload for %d tickets...", numAssets*vulnsPerAsset) + + baselinePayload := make([]domain.Ticket, 0, numAssets*vulnsPerAsset) + for assetID := 1; assetID <= numAssets; assetID++ { + assetName := fmt.Sprintf("Server-%05d", assetID) + for vulnID := 1; vulnID <= vulnsPerAsset; vulnID++ { + baselinePayload = append(baselinePayload, domain.Ticket{ + Source: "HeavyLoadTester", + AssetIdentifier: assetName, + Title: fmt.Sprintf("Vulnerability-%03d", vulnID), + Severity: "High", + }) + } + } + + t.Log("Marshaling 1M tickets to JSON...") + body1, _ := json.Marshal(baselinePayload) + req1 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(body1)) + rr1 := httptest.NewRecorder() + + t.Log("Hitting API with Baseline 1M Scan...") + a.HandleIngest(rr1, req1) + + if rr1.Code != http.StatusCreated { + t.Fatalf("Baseline ingest failed with status %d", rr1.Code) + } + + var count1 int + db.QueryRow(`SELECT COUNT(*) FROM tickets`).Scan(&count1) + if count1 != 1000000 { + t.Fatalf("Expected 1,000,000 tickets inserted, got %d", count1) + } + + t.Log("Generating Diff payload...") + + diffPayload := make([]domain.Ticket, 0, numAssets*vulnsPerAsset) + for assetID := 1; assetID <= numAssets; assetID++ { + assetName := fmt.Sprintf("Server-%05d", assetID) + + for vulnID := 1; vulnID <= 80; vulnID++ { + diffPayload = append(diffPayload, domain.Ticket{ + Source: "HeavyLoadTester", + AssetIdentifier: assetName, + Title: fmt.Sprintf("Vulnerability-%03d", vulnID), + Severity: "High", + }) + } + + for vulnID := 101; vulnID <= 120; vulnID++ { + diffPayload = append(diffPayload, domain.Ticket{ + Source: "HeavyLoadTester", + 
AssetIdentifier: assetName, + Title: fmt.Sprintf("Vulnerability-%03d", vulnID), + Severity: "Critical", + }) + } + } + + t.Log("Marshaling Diff payload to JSON...") + body2, _ := json.Marshal(diffPayload) + req2 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(body2)) + rr2 := httptest.NewRecorder() + + t.Log("Hitting API with Diff 1M Scan...") + a.HandleIngest(rr2, req2) + + if rr2.Code != http.StatusCreated { + t.Fatalf("Diff ingest failed with status %d", rr2.Code) + } + + t.Log("Running Assertions...") + + var totalRows int + db.QueryRow(`SELECT COUNT(*) FROM tickets`).Scan(&totalRows) + if totalRows != 1200000 { + t.Errorf("Expected exactly 1,200,000 total rows in DB, got %d", totalRows) + } + + var patchedCount int + db.QueryRow(`SELECT COUNT(*) FROM tickets WHERE status = 'Patched'`).Scan(&patchedCount) + if patchedCount != 200000 { + t.Errorf("Expected exactly 200,000 auto-patched tickets, got %d", patchedCount) + } + + var openCount int + db.QueryRow(`SELECT COUNT(*) FROM tickets WHERE status = 'Waiting to be Triaged'`).Scan(&openCount) + if openCount != 1000000 { + t.Errorf("Expected exactly 1,000,000 open tickets, got %d", openCount) + } +} + +func TestSyncLogReceipts(t *testing.T) { + h, db := setupTestIngest(t) + defer db.Close() + db.Exec(`CREATE TABLE IF NOT EXISTS sync_logs (id INTEGER PRIMARY KEY, source TEXT, status TEXT, records_processed INTEGER, error_message TEXT)`) + + payload := []byte(`[{"source": "Dependabot", "asset_identifier": "repo-1", "title": "Vuln 1", "severity": "High"}]`) + req1 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(payload)) + req1.AddCookie(GetVIPCookie(h.Store)) + req1.Header.Set("Content-Type", "application/json") + h.HandleIngest(httptest.NewRecorder(), req1) + + badPayload := []byte(`[{"source": "Dependabot", "title": "Vuln 1", "severity": "High", "status": "GarbageStatus"}]`) + + req2 := httptest.NewRequest(http.MethodPost, "/api/ingest", bytes.NewBuffer(badPayload)) + 
req2.AddCookie(GetVIPCookie(h.Store)) + req2.Header.Set("Content-Type", "application/json") + h.HandleIngest(httptest.NewRecorder(), req2) + + var successCount, failCount, processed int + db.QueryRow("SELECT COUNT(*), MAX(records_processed) FROM sync_logs WHERE source = 'Dependabot' AND status = 'Success'").Scan(&successCount, &processed) + db.QueryRow("SELECT COUNT(*) FROM sync_logs WHERE status = 'Failed'").Scan(&failCount) + + if successCount != 1 || processed != 1 { + t.Errorf("System failed to log successful sync receipt. Got count: %d, processed: %d", successCount, processed) + } + if failCount != 1 { + t.Errorf("System failed to log failed sync receipt. Got count: %d", failCount) + } +} + +func TestUIFileDropIngestion(t *testing.T) { + h, db := setupTestIngest(t) + defer db.Close() + + res, err := db.Exec(`INSERT INTO data_adapters (name, source_name, mapping_title, mapping_asset, mapping_severity) VALUES ('UI-Tool', 'UITool', 'Name', 'Host', 'Risk')`) + if err != nil { + t.Fatalf("failed to seed adapter: %v", err) + } + adapterID, _ := res.LastInsertId() + + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + part, _ := writer.CreateFormFile("file", "test_findings.csv") + part.Write([]byte("Name,Host,Risk\nUnauthorized Access,10.0.0.1,Critical")) + + _ = writer.WriteField("adapter_id", fmt.Sprintf("%d", adapterID)) + writer.Close() + + req := httptest.NewRequest(http.MethodPost, "/api/ingest/csv", body) + req.Header.Set("Content-Type", writer.FormDataContentType()) + req.AddCookie(GetVIPCookie(h.Store)) + rr := httptest.NewRecorder() + h.HandleCSVIngest(rr, req) + + if rr.Code != http.StatusCreated { + t.Fatalf("expected 201 Created, got %d: %s", rr.Code, rr.Body.String()) + } + var count int + db.QueryRow("SELECT COUNT(*) FROM tickets WHERE source = 'UITool'").Scan(&count) + if count != 1 { + t.Errorf("UI Drop failed: expected 1 ticket, got %d", count) + } +} diff --git a/pkg/report/docx_html.go b/pkg/report/docx_html.go new file mode 100644 
index 0000000..8ecf826 --- /dev/null +++ b/pkg/report/docx_html.go @@ -0,0 +1,113 @@ +package report + +import ( + "archive/zip" + "bytes" + "encoding/base64" + "encoding/xml" + "fmt" + "io" + "net/http" + "path/filepath" + "strings" +) + +// Relationships maps the pkg rId to the actual media file +type Relationships struct { + XMLName xml.Name `xml:"Relationships"` + Rel []struct { + Id string `xml:"Id,attr"` + Target string `xml:"Target,attr"` + } `xml:"Relationship"` +} + +func ServeDOCXAsHTML(w http.ResponseWriter, docxPath string) { + r, err := zip.OpenReader(docxPath) + if err != nil { + http.Error(w, "Failed to open DOCX archive", http.StatusInternalServerError) + return + } + defer r.Close() + + relsMap := make(map[string]string) + for _, f := range r.File { + if f.Name == "word/_rels/document.xml.rels" { + rc, _ := f.Open() + var rels Relationships + xml.NewDecoder(rc).Decode(&rels) + rc.Close() + for _, rel := range rels.Rel { + relsMap[rel.Id] = rel.Target + } + break + } + } + + mediaMap := make(map[string]string) + for _, f := range r.File { + if strings.HasPrefix(f.Name, "word/media/") { + rc, _ := f.Open() + data, _ := io.ReadAll(rc) + rc.Close() + + ext := strings.TrimPrefix(filepath.Ext(f.Name), ".") + if ext == "jpeg" || ext == "jpg" { + ext = "jpeg" + } + b64 := base64.StdEncoding.EncodeToString(data) + mediaMap[f.Name] = fmt.Sprintf("data:image/%s;base64,%s", ext, b64) + } + } + + var htmlOutput bytes.Buffer + var inParagraph bool + + for _, f := range r.File { + if f.Name == "word/document.xml" { + rc, _ := f.Open() + decoder := xml.NewDecoder(rc) + + for { + token, err := decoder.Token() + if err != nil { + break + } + + switch se := token.(type) { + case xml.StartElement: + if se.Name.Local == "p" { + htmlOutput.WriteString("
<p>
") + inParagraph = true + } + if se.Name.Local == "t" { + var text string + decoder.DecodeElement(&text, &se) + htmlOutput.WriteString(text) + } + if se.Name.Local == "blip" { + for _, attr := range se.Attr { + if attr.Name.Local == "embed" { + targetPath := relsMap[attr.Value] + fullMediaPath := "word/" + targetPath + + if b64URI, exists := mediaMap[fullMediaPath]; exists { + imgTag := fmt.Sprintf(`
<img src="%s"/>
`, b64URI) + htmlOutput.WriteString(imgTag) + } + } + } + } + case xml.EndElement: + if se.Name.Local == "p" && inParagraph { + htmlOutput.WriteString("
</p>
\n") + inParagraph = false + } + } + } + rc.Close() + break + } + } + + w.Write(htmlOutput.Bytes()) +} diff --git a/pkg/report/drafts.go b/pkg/report/drafts.go new file mode 100644 index 0000000..c734ef9 --- /dev/null +++ b/pkg/report/drafts.go @@ -0,0 +1,107 @@ +package report + +import ( + "encoding/json" + "net/http" + "strconv" + + "epigas.gitea.cloud/RiskRancher/core/pkg/auth" + domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func (h *Handler) HandleSaveDraft(w http.ResponseWriter, r *http.Request) { + reportID := r.PathValue("id") + + var draft domain2.DraftTicket + if err := json.NewDecoder(r.Body).Decode(&draft); err != nil { + http.Error(w, "Invalid JSON", http.StatusBadRequest) + return + } + draft.ReportID = reportID + + if err := h.Store.SaveDraft(r.Context(), draft); err != nil { + http.Error(w, "DB Error: "+err.Error(), http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusCreated) +} + +func (h *Handler) HandleGetDrafts(w http.ResponseWriter, r *http.Request) { + reportID := r.PathValue("id") + + drafts, err := h.Store.GetDraftsByReport(r.Context(), reportID) + if err != nil { + http.Error(w, "Failed to get drafts", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(drafts) +} + +func (h *Handler) HandleDeleteDraft(w http.ResponseWriter, r *http.Request) { + draftID := r.PathValue("draft_id") + + if err := h.Store.DeleteDraft(r.Context(), draftID); err != nil { + http.Error(w, "Failed to delete draft", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) +} + +func (h *Handler) HandlePromoteDrafts(w http.ResponseWriter, r *http.Request) { + reportIDStr := r.PathValue("id") + if reportIDStr == "" { + http.Error(w, "Invalid Report ID", http.StatusBadRequest) + return + } + + userIDVal := r.Context().Value(auth.UserIDKey) + if userIDVal == nil { + http.Error(w, "Unauthorized", http.StatusUnauthorized) + return + } + + 
user, err := h.Store.GetUserByID(r.Context(), userIDVal.(int)) + if err != nil { + http.Error(w, "Failed to identify user", http.StatusInternalServerError) + return + } + analystEmail := user.Email + + var payload []domain2.Ticket + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + if err := h.Store.PromotePentestDrafts(r.Context(), reportIDStr, analystEmail, payload); err != nil { + http.Error(w, "Database error during promotion: "+err.Error(), http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusCreated) +} + +func (h *Handler) HandleUpdateDraft(w http.ResponseWriter, r *http.Request) { + idStr := r.PathValue("id") + draftID, err := strconv.Atoi(idStr) + if err != nil { + http.Error(w, "Invalid draft ID", http.StatusBadRequest) + return + } + + var payload domain2.Ticket + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + http.Error(w, "Invalid JSON", http.StatusBadRequest) + return + } + + if err := h.Store.UpdateDraft(r.Context(), draftID, payload); err != nil { + http.Error(w, "Failed to auto-save draft", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) +} diff --git a/pkg/report/handler.go b/pkg/report/handler.go new file mode 100644 index 0000000..438b4ee --- /dev/null +++ b/pkg/report/handler.go @@ -0,0 +1,13 @@ +package report + +import ( + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +type Handler struct { + Store domain.Store +} + +func NewHandler(store domain.Store) *Handler { + return &Handler{Store: store} +} diff --git a/pkg/report/parser.go b/pkg/report/parser.go new file mode 100644 index 0000000..8e09858 --- /dev/null +++ b/pkg/report/parser.go @@ -0,0 +1,57 @@ +package report + +import ( + "fmt" + "strconv" + "strings" +) + +// ExtractJSONField traverses an unmarshaled JSON object using dot notation. 
+func ExtractJSONField(data any, path string) string { + if path == "" || data == nil { + return "" + } + + parts := strings.Split(path, ".") + current := data + + for _, part := range parts { + if current == nil { + return "" + } + + switch v := current.(type) { + case map[string]any: + val, ok := v[part] + if !ok { + return "" + } + current = val + + case []any: + idx, err := strconv.Atoi(part) + if err != nil || idx < 0 || idx >= len(v) { + return "" + } + current = v[idx] + + default: + return "" + } + } + + if current == nil { + return "" + } + + switch v := current.(type) { + case string: + return v + case float64: + return strconv.FormatFloat(v, 'f', -1, 64) + case bool: + return strconv.FormatBool(v) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/pkg/report/parser_test.go b/pkg/report/parser_test.go new file mode 100644 index 0000000..3708b54 --- /dev/null +++ b/pkg/report/parser_test.go @@ -0,0 +1,68 @@ +package report + +import ( + "encoding/json" + "testing" +) + +func TestExtractJSONField(t *testing.T) { + semgrepRaw := []byte(`{ + "check_id": "crypto-bad-mac", + "extra": { + "severity": "WARNING", + "message": "Use of weak MAC" + } + }`) + var semgrep map[string]any + json.Unmarshal(semgrepRaw, &semgrep) + + trivyRaw := []byte(`{ + "VulnerabilityID": "CVE-2021-44228", + "PkgName": "log4j-core", + "Severity": "CRITICAL" + }`) + var trivy map[string]any + json.Unmarshal(trivyRaw, &trivy) + + openvasRaw := []byte(`{ + "name": "Cleartext Transmission", + "host": { + "details": [ + {"ip": "192.168.1.50"}, + {"ip": "10.0.0.5"} + ] + }, + "threat": "High" + }`) + var openvas map[string]any + json.Unmarshal(openvasRaw, &openvas) + + tests := []struct { + name string + finding any + path string + expected string + }{ + {"Semgrep Flat", semgrep, "check_id", "crypto-bad-mac"}, + {"Semgrep Nested", semgrep, "extra.severity", "WARNING"}, + {"Semgrep Deep Nested", semgrep, "extra.message", "Use of weak MAC"}, + + {"Trivy Flat 1", trivy, 
"VulnerabilityID", "CVE-2021-44228"}, + {"Trivy Flat 2", trivy, "Severity", "CRITICAL"}, + + {"OpenVAS Flat", openvas, "threat", "High"}, + {"OpenVAS Array Index", openvas, "host.details.0.ip", "192.168.1.50"}, + + {"Missing Field", trivy, "does.not.exist", ""}, + {"Empty Path", trivy, "", ""}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := ExtractJSONField(tc.finding, tc.path) + if result != tc.expected { + t.Errorf("Path '%s': expected '%s', got '%s'", tc.path, tc.expected, result) + } + }) + } +} diff --git a/pkg/report/reports.go b/pkg/report/reports.go new file mode 100644 index 0000000..89d4ff0 --- /dev/null +++ b/pkg/report/reports.go @@ -0,0 +1,131 @@ +package report + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "io" + "math/rand" + "net/http" + "os" + "path/filepath" + "strings" +) + +var UploadDir = "./testdata" + +// HandleUploadReport safely receives and stores the pentest file +func (h *Handler) HandleUploadReport(w http.ResponseWriter, r *http.Request) { + if err := r.ParseMultipartForm(50 << 20); err != nil { + http.Error(w, "Failed to parse form or file too large", http.StatusBadRequest) + return + } + + file, header, err := r.FormFile("file") + if err != nil { + http.Error(w, "Missing 'file' field in upload", http.StatusBadRequest) + return + } + defer file.Close() + + cleanName := filepath.Base(header.Filename) + if cleanName == "." 
|| cleanName == "/" { + cleanName = "uploaded_report.bin" + } + + os.MkdirAll(UploadDir, 0755) + + destPath := filepath.Join(UploadDir, cleanName) + destFile, err := os.Create(destPath) + if err != nil { + http.Error(w, "Failed to save file to disk", http.StatusInternalServerError) + return + } + defer destFile.Close() + + if _, err := io.Copy(destFile, file); err != nil { + http.Error(w, "Error writing file", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + fmt.Fprintf(w, `{"file_id": "%s"}`, cleanName) +} + +// HandleViewReport streams the file to the iframe, converting DOCX if needed +func (h *Handler) HandleViewReport(w http.ResponseWriter, r *http.Request) { + fileID := r.PathValue("id") + cleanName := filepath.Base(fileID) + filePath := filepath.Join(UploadDir, cleanName) + + if _, err := os.Stat(filePath); os.IsNotExist(err) { + http.Error(w, "Report not found", http.StatusNotFound) + return + } + + ext := strings.ToLower(filepath.Ext(cleanName)) + + if ext == ".pdf" { + w.Header().Set("Content-Type", "application/pdf") + w.Header().Set("Content-Disposition", "inline; filename="+cleanName) + http.ServeFile(w, r, filePath) + return + } + + if ext == ".docx" { + ServeDOCXAsHTML(w, filePath) + return + } + + http.Error(w, "Unsupported file type. 
Please upload PDF or DOCX.", http.StatusBadRequest)
+}
+
+// HandleImageUpload accepts a JSON payload with a data-URL image
+// ("data:image/png;base64,...."), decodes it, writes it under
+// data/testdata/images, and returns the public URL for embedding.
+func (h *Handler) HandleImageUpload(w http.ResponseWriter, r *http.Request) {
+	var payload struct {
+		Base64Data string `json:"image_data"`
+	}
+	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
+		http.Error(w, "Invalid JSON payload", http.StatusBadRequest)
+		return
+	}
+
+	// Data URLs look like "data:image/png;base64,<payload>"; split metadata from data.
+	parts := strings.SplitN(payload.Base64Data, ",", 2)
+	if len(parts) != 2 {
+		http.Error(w, "Invalid Base64 image format", http.StatusBadRequest)
+		return
+	}
+
+	ext := ".png"
+	if strings.Contains(parts[0], "jpeg") || strings.Contains(parts[0], "jpg") {
+		ext = ".jpg"
+	}
+
+	rawBase64 := parts[1]
+	imgBytes, err := base64.StdEncoding.DecodeString(rawBase64)
+	if err != nil {
+		// Malformed client input, not a server fault: report 400, not 500.
+		http.Error(w, "Failed to decode Base64 data", http.StatusBadRequest)
+		return
+	}
+
+	// math/rand is sufficient for filename uniqueness; nothing security-sensitive
+	// depends on this value.
+	randBytes := make([]byte, 8)
+	rand.Read(randBytes)
+	fileName := fmt.Sprintf("img_%x%s", randBytes, ext)
+
+	uploadDir := filepath.Join("data", "testdata", "images")
+	if err := os.MkdirAll(uploadDir, 0755); err != nil {
+		http.Error(w, "Failed to create directory structure", http.StatusInternalServerError)
+		return
+	}
+
+	savePath := filepath.Join(uploadDir, fileName)
+	if err := os.WriteFile(savePath, imgBytes, 0644); err != nil {
+		http.Error(w, "Failed to save image to disk", http.StatusInternalServerError)
+		return
+	}
+
+	publicURL := "/testdata/images/" + fileName
+
+	w.Header().Set("Content-Type", "application/json")
+	json.NewEncoder(w).Encode(map[string]string{"url": publicURL})
+}
diff --git a/pkg/report/reports_test.go b/pkg/report/reports_test.go
new file mode 100644
index 0000000..4d5b22f
--- /dev/null
+++ b/pkg/report/reports_test.go
@@ -0,0 +1,126 @@
+package report
+
+import (
+	"archive/zip"
+	"bytes"
+	"context"
+	"database/sql"
+	"encoding/json"
+	"mime/multipart"
+	"net/http"
+	"net/http/httptest"
+	"strings"
+	"testing"
+	"time"
+
+	"epigas.gitea.cloud/RiskRancher/core/pkg/datastore"
"epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func setupTestReport(t *testing.T) (*Handler, *sql.DB) { + db := datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +func GetVIPCookie(store domain.Store) *http.Cookie { + user, err := store.GetUserByEmail(context.Background(), "vip@RiskRancher.com") + if err != nil { + user, _ = store.CreateUser(context.Background(), "vip@RiskRancher.com", "Test VIP", "hash", "Sheriff") + } + + store.CreateSession(context.Background(), "vip_token_999", user.ID, time.Now().Add(1*time.Hour)) + return &http.Cookie{Name: "session_token", Value: "vip_token_999"} +} + +func TestUploadAndViewReports(t *testing.T) { + h, db := setupTestReport(t) + defer db.Close() + + t.Run("1. Test PDF Upload and View", func(t *testing.T) { + body := new(bytes.Buffer) + writer := multipart.NewWriter(body) + part, _ := writer.CreateFormFile("file", "test_report.pdf") + part.Write([]byte("%PDF-1.4 Fake PDF Content")) + writer.Close() + + reqUp := httptest.NewRequest(http.MethodPost, "/api/reports/upload", body) + reqUp.AddCookie(GetVIPCookie(h.Store)) + reqUp.Header.Set("Content-Type", writer.FormDataContentType()) + rrUp := httptest.NewRecorder() + h.HandleUploadReport(rrUp, reqUp) + + reqView := httptest.NewRequest(http.MethodGet, "/api/reports/view/test_report.pdf", nil) + reqView.AddCookie(GetVIPCookie(h.Store)) + reqView.SetPathValue("id", "test_report.pdf") + rrView := httptest.NewRecorder() + h.HandleViewReport(rrView, reqView) + + if rrView.Code != http.StatusOK { + t.Fatalf("Expected 200 OK for PDF View, got %d", rrView.Code) + } + }) + + t.Run("2. 
Test DOCX to HTML", func(t *testing.T) { + buf := new(bytes.Buffer) + zipWriter := zip.NewWriter(buf) + docWriter, _ := zipWriter.Create("word/document.xml") + docWriter.Write([]byte(`Cross-Site Scripting`)) + zipWriter.Close() + + body := new(bytes.Buffer) + writer := multipart.NewWriter(body) + part, _ := writer.CreateFormFile("file", "fake_pentest.docx") + part.Write(buf.Bytes()) + writer.Close() + + reqUp := httptest.NewRequest(http.MethodPost, "/api/reports/upload", body) + reqUp.Header.Set("Content-Type", writer.FormDataContentType()) + rrUp := httptest.NewRecorder() + h.HandleUploadReport(rrUp, reqUp) + + reqView := httptest.NewRequest(http.MethodGet, "/api/reports/view/fake_pentest.docx", nil) + reqView.SetPathValue("id", "fake_pentest.docx") + rrView := httptest.NewRecorder() + h.HandleViewReport(rrView, reqView) + + if !strings.Contains(rrView.Body.String(), "Cross-Site Scripting") { + t.Errorf("DOCX-to-HTML failed. Body: %s", rrView.Body.String()) + } + }) +} + +func TestDraftQueueLifecycle(t *testing.T) { + h, db := setupTestReport(t) + defer db.Close() + + reportID := "report-uuid-123.pdf" + + // Save Draft + draftPayload := []byte(`{"title": "SQLi", "severity": "High", "description": "Page 4"}`) + reqPost := httptest.NewRequest(http.MethodPost, "/api/drafts/report/"+reportID, bytes.NewBuffer(draftPayload)) + reqPost.SetPathValue("id", reportID) + rrPost := httptest.NewRecorder() + h.HandleSaveDraft(rrPost, reqPost) + + if rrPost.Code >= 400 { + t.Fatalf("Failed to save draft! 
HTTP Code: %d, Error: %s", rrPost.Code, rrPost.Body.String()) + } + + reqGet := httptest.NewRequest(http.MethodGet, "/api/drafts/report/"+reportID, nil) + reqGet.SetPathValue("id", reportID) + rrGet := httptest.NewRecorder() + h.HandleGetDrafts(rrGet, reqGet) + + var drafts []domain.DraftTicket + json.NewDecoder(rrGet.Body).Decode(&drafts) + if len(drafts) != 1 || drafts[0].Title != "SQLi" { + t.Fatalf("Draft GET mismatch") + } + + // Delete Draft + reqDel := httptest.NewRequest(http.MethodDelete, "/api/drafts/1", nil) + reqDel.SetPathValue("draft_id", "1") + rrDel := httptest.NewRecorder() + h.HandleDeleteDraft(rrDel, reqDel) +} diff --git a/pkg/report/testdata/fake_pentest.docx b/pkg/report/testdata/fake_pentest.docx new file mode 100644 index 0000000..afca121 Binary files /dev/null and b/pkg/report/testdata/fake_pentest.docx differ diff --git a/pkg/report/testdata/test_report.pdf b/pkg/report/testdata/test_report.pdf new file mode 100644 index 0000000..9c98a40 --- /dev/null +++ b/pkg/report/testdata/test_report.pdf @@ -0,0 +1 @@ +%PDF-1.4 Fake PDF Content \ No newline at end of file diff --git a/pkg/server/app.go b/pkg/server/app.go new file mode 100644 index 0000000..30a769b --- /dev/null +++ b/pkg/server/app.go @@ -0,0 +1,34 @@ +package server + +import ( + "net/http" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" + "epigas.gitea.cloud/RiskRancher/core/pkg/sla" +) + +type App struct { + Store domain.Store + Router *http.ServeMux + Auth domain.Authenticator + SLA domain.SLACalculator +} + +type FreeAuth struct{} + +func (f *FreeAuth) Middleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // In the OSS version, we just pass the request to the next handler for now. + next.ServeHTTP(w, r) + }) +} + +// NewApp creates a Risk Rancher Core application with OSS defaults. 
+func NewApp(store domain.Store) *App { + return &App{ + Store: store, + Router: http.NewServeMux(), + Auth: &FreeAuth{}, + SLA: sla.NewSLACalculator(), + } +} diff --git a/pkg/server/routes.go b/pkg/server/routes.go new file mode 100644 index 0000000..e1d6b84 --- /dev/null +++ b/pkg/server/routes.go @@ -0,0 +1,116 @@ +package server + +import ( + "net/http" + + "epigas.gitea.cloud/RiskRancher/core/pkg/adapters" + "epigas.gitea.cloud/RiskRancher/core/pkg/admin" + "epigas.gitea.cloud/RiskRancher/core/pkg/analytics" + "epigas.gitea.cloud/RiskRancher/core/pkg/auth" + "epigas.gitea.cloud/RiskRancher/core/pkg/ingest" + "epigas.gitea.cloud/RiskRancher/core/pkg/report" + "epigas.gitea.cloud/RiskRancher/core/pkg/tickets" + "epigas.gitea.cloud/RiskRancher/core/ui" +) + +func RegisterRoutes(app *App) { + + authH := auth.NewHandler(app.Store) + adminH := admin.NewHandler(app.Store) + ticketH := tickets.NewHandler(app.Store) + ingestH := ingest.NewHandler(app.Store) + adapterH := adapters.NewHandler(app.Store) + reportH := report.NewHandler(app.Store) + analyticsH := analytics.NewHandler(app.Store) + + protected := func(h http.HandlerFunc) http.Handler { + return authH.RequireAuth(http.HandlerFunc(h)) + } + protectedUI := func(h http.HandlerFunc) http.Handler { + return authH.RequireUIAuth(http.HandlerFunc(h)) + } + sheriffOnly := func(h http.HandlerFunc) http.Handler { + return authH.RequireAuth(authH.RequireRole("Sheriff")(http.HandlerFunc(h))) + } + adminOnly := func(h http.HandlerFunc) http.Handler { + return authH.RequireAuth(authH.RequireAnyRole("Sheriff", "Wrangler")(http.HandlerFunc(h))) + } + + // ========================================================= + // PUBLIC ROUTES + // ========================================================= + app.Router.Handle("GET /login", ui.HandleLoginUI()) + app.Router.Handle("GET /register", ui.HandleRegisterUI()) + + app.Router.HandleFunc("POST /api/auth/register", authH.HandleRegister) + app.Router.HandleFunc("POST /api/auth/login", 
authH.HandleLogin) + app.Router.HandleFunc("POST /api/auth/logout", authH.HandleLogout) + + // ========================================================= + // PROTECTED ROUTES + // ========================================================= + app.Router.Handle("GET /api/wranglers", protected(adminH.HandleGetWranglers)) + app.Router.Handle("GET /", http.RedirectHandler("/dashboard", http.StatusSeeOther)) + app.Router.Handle("GET /dashboard", protectedUI(ui.HandleDashboard(app.Store))) + + // Core Tickets + app.Router.Handle("GET /api/tickets", protected(ticketH.HandleGetTickets)) + app.Router.Handle("POST /api/tickets", protected(ticketH.HandleCreateTicket)) + app.Router.Handle("PATCH /api/tickets/{id}", protected(ticketH.HandleUpdateTicket)) + + // Ingestion + app.Router.Handle("POST /api/ingest", protected(ingestH.HandleIngest)) + app.Router.Handle("POST /api/ingest/csv", protected(ingestH.HandleCSVIngest)) + app.Router.Handle("POST /api/ingest/{name}", protected(adapterH.HandleAdapterIngest)) + + // Adapters & Configuration + app.Router.Handle("GET /api/adapters", protected(adapterH.HandleGetAdapters)) + app.Router.Handle("GET /api/config", protected(adminH.HandleGetConfig)) + + // Analytics + app.Router.Handle("GET /api/analytics/summary", protected(analyticsH.HandleGetAnalyticsSummary)) + + // Pentest Reports & Drafts (PDF PARSER - Free Lead Magnet!) 
+ app.Router.Handle("POST /api/reports/upload", protected(reportH.HandleUploadReport)) + app.Router.Handle("GET /api/reports/view/{id}", protected(reportH.HandleViewReport)) + app.Router.Handle("POST /api/drafts/report/{id}", protected(reportH.HandleSaveDraft)) + app.Router.Handle("GET /api/drafts/report/{id}", protected(reportH.HandleGetDrafts)) + app.Router.Handle("DELETE /api/drafts/{draft_id}", protected(reportH.HandleDeleteDraft)) + + // ========================================================= + // SHERIFF & ADMIN ONLY + // ========================================================= + + app.Router.Handle("GET /admin", sheriffOnly(ui.HandleAdminDashboard(app.Store))) + + app.Router.Handle("POST /api/adapters", adminOnly(adapterH.HandleCreateAdapter)) + app.Router.Handle("DELETE /api/adapters/{id}", adminOnly(adapterH.HandleDeleteAdapter)) + + app.Router.Handle("GET /api/admin/export", sheriffOnly(adminH.HandleExportState)) + app.Router.Handle("GET /api/admin/check-updates", sheriffOnly(adminH.HandleCheckUpdates)) + app.Router.Handle("POST /api/admin/shutdown", sheriffOnly(adminH.HandleShutdown)) + + app.Router.Handle("GET /api/admin/users", adminOnly(adminH.HandleGetUsers)) + app.Router.Handle("POST /api/admin/users", sheriffOnly(adminH.HandleCreateUser)) + app.Router.Handle("PATCH /api/admin/users/{id}/reset-password", sheriffOnly(adminH.HandleAdminResetPassword)) + app.Router.Handle("PATCH /api/admin/users/{id}/role", sheriffOnly(adminH.HandleUpdateUserRole)) + app.Router.Handle("DELETE /api/admin/users/{id}", sheriffOnly(adminH.HandleDeactivateUser)) + app.Router.Handle("GET /api/admin/logs", sheriffOnly(adminH.HandleGetLogs)) + + app.Router.Handle("GET /static/", ui.StaticHandler()) + + // ========================================================= + // UI EXTENSIONS + // ========================================================= + + app.Router.Handle("GET /ingest", protectedUI(ui.HandleIngestUI(app.Store))) + app.Router.Handle("GET /admin/adapters/new", 
protectedUI(ui.HandleAdapterBuilderUI(app.Store))) + + // Word Docx Parser + app.Router.Handle("GET /reports/parser/{id}", protectedUI(ui.HandleParserUI(app.Store))) + app.Router.Handle("POST /api/reports/promote/{id}", protected(reportH.HandlePromoteDrafts)) + app.Router.Handle("GET /reports/upload", protectedUI(ui.HandlePentestUploadUI(app.Store))) + app.Router.Handle("PUT /api/drafts/{id}", protected(reportH.HandleUpdateDraft)) + app.Router.Handle("POST /api/images/upload", protected(reportH.HandleImageUpload)) + app.Router.Handle("GET /uploads/", http.StripPrefix("/testdata/", http.FileServer(http.Dir("./data/testdata")))) +} diff --git a/pkg/sla/sla.go b/pkg/sla/sla.go new file mode 100644 index 0000000..9f156b4 --- /dev/null +++ b/pkg/sla/sla.go @@ -0,0 +1,127 @@ +package sla + +import ( + "context" + "log" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +// DefaultSLACalculator implements the SLACalculator interface +type DefaultSLACalculator struct { + Timezone string + BusinessStart int + BusinessEnd int + Holidays map[string]bool +} + +// NewSLACalculator returns the interface +func NewSLACalculator() domain.SLACalculator { + return &DefaultSLACalculator{ + Timezone: "UTC", + BusinessStart: 9, + BusinessEnd: 17, + Holidays: make(map[string]bool), + } +} + +// CalculateDueDate for the finding based on SLA +func (c *DefaultSLACalculator) CalculateDueDate(severity string) *time.Time { + var days int + switch severity { + case "Critical": + days = 3 + case "High": + days = 14 + case "Medium": + days = 30 + case "Low": + days = 90 + default: + days = 30 + } + + loc, err := time.LoadLocation(c.Timezone) + if err != nil { + log.Printf("Warning: Invalid timezone '%s', falling back to UTC", c.Timezone) + loc = time.UTC + } + + nowLocal := time.Now().In(loc) + dueDate := c.AddBusinessDays(nowLocal, days) + return &dueDate +} + +// AddBusinessDays for working days not weekends and some holidays +func (c *DefaultSLACalculator) AddBusinessDays(start 
time.Time, businessDays int) time.Time { + current := start + added := 0 + for added < businessDays { + current = current.AddDate(0, 0, 1) + weekday := current.Weekday() + dateStr := current.Format("2006-01-02") + if weekday != time.Saturday && weekday != time.Sunday && !c.Holidays[dateStr] { + added++ + } + } + return current +} + +// CalculateTrueSLAHours based on the time of action for ticket +func (c *DefaultSLACalculator) CalculateTrueSLAHours(ctx context.Context, ticketID int, store domain.Store) (float64, error) { + appConfig, err := store.GetAppConfig(ctx) + if err != nil { + return 0, err + } + + ticket, err := store.GetTicketByID(ctx, ticketID) + if err != nil { + return 0, err + } + + end := time.Now() + if ticket.PatchedAt != nil { + end = *ticket.PatchedAt + } + + totalActiveBusinessHours := c.calculateBusinessHoursBetween(ticket.CreatedAt, end, appConfig) + return totalActiveBusinessHours, nil +} + +// calculateBusinessHoursBetween calculates strict working hours between two timestamps +func (c *DefaultSLACalculator) calculateBusinessHoursBetween(start, end time.Time, config domain.AppConfig) float64 { + loc, _ := time.LoadLocation(config.Timezone) + start = start.In(loc) + end = end.In(loc) + + if start.After(end) { + return 0 + } + + var activeHours float64 + current := start + + for current.Before(end) { + nextHour := current.Add(time.Hour) + if nextHour.After(end) { + nextHour = end + } + + weekday := current.Weekday() + dateStr := current.Format("2006-01-02") + hour := current.Hour() + + isWeekend := weekday == time.Saturday || weekday == time.Sunday + isHoliday := c.Holidays[dateStr] + isBusinessHour := hour >= config.BusinessStart && hour < config.BusinessEnd + + if !isWeekend && !isHoliday && isBusinessHour { + activeHours += nextHour.Sub(current).Hours() + } + + current = nextHour + } + + return activeHours +} diff --git a/pkg/sla/sla_test.go b/pkg/sla/sla_test.go new file mode 100644 index 0000000..626569f --- /dev/null +++ 
b/pkg/sla/sla_test.go @@ -0,0 +1,116 @@ +package sla_test + +import ( + "database/sql" + "testing" + + _ "github.com/mattn/go-sqlite3" +) + +// GetSLAPolicy simulates the core engine function that fetches SLA rules +func GetSLAPolicy(db *sql.DB, domain string, severity string) (daysToRemediate int, maxExtensions int, err error) { + query := `SELECT days_to_remediate, max_extensions FROM sla_policies WHERE domain = ? AND severity = ?` + err = db.QueryRow(query, domain, severity).Scan(&daysToRemediate, &maxExtensions) + return daysToRemediate, maxExtensions, err +} + +// setupTestDB spins up an isolated, in-memory database for testing +func setupTestDB(t *testing.T) *sql.DB { + db, err := sql.Open("sqlite3", ":memory:") + if err != nil { + t.Fatalf("Failed to open test database: %v", err) + } + + schema := ` + CREATE TABLE domains (name TEXT PRIMARY KEY); + CREATE TABLE sla_policies ( + domain TEXT NOT NULL, + severity TEXT NOT NULL, + days_to_remediate INTEGER NOT NULL, + max_extensions INTEGER NOT NULL DEFAULT 3, + PRIMARY KEY (domain, severity) + ); + INSERT INTO domains (name) VALUES ('Vulnerability'), ('Privacy'), ('Incident'); + INSERT INTO sla_policies (domain, severity, days_to_remediate, max_extensions) VALUES + ('Vulnerability', 'Critical', 14, 1), + ('Vulnerability', 'High', 30, 2), + ('Privacy', 'Critical', 3, 0), + ('Incident', 'Critical', 1, 0); + ` + if _, err := db.Exec(schema); err != nil { + t.Fatalf("Failed to execute test schema: %v", err) + } + return db +} + +func TestSLAEngine(t *testing.T) { + db := setupTestDB(t) + defer db.Close() + + tests := []struct { + name string + domain string + severity string + expectDays int + expectExtensions int + expectError bool + }{ + { + name: "VM Critical (Standard)", + domain: "Vulnerability", + severity: "Critical", + expectDays: 14, + expectExtensions: 1, + expectError: false, + }, + { + name: "Privacy Critical (Strict 72-hour, No Extensions)", + domain: "Privacy", + severity: "Critical", + expectDays: 3, 
+ expectExtensions: 0, + expectError: false, + }, + { + name: "Incident Critical (24-hour, No Extensions)", + domain: "Incident", + severity: "Critical", + expectDays: 1, + expectExtensions: 0, + expectError: false, + }, + { + name: "Unknown Domain (Should Fail)", + domain: "PhysicalSecurity", + severity: "Critical", + expectError: true, + }, + { + name: "Unknown Severity (Should Fail)", + domain: "Vulnerability", + severity: "SuperCritical", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + days, extensions, err := GetSLAPolicy(db, tt.domain, tt.severity) + + if (err != nil) != tt.expectError { + t.Fatalf("expected error: %v, got: %v", tt.expectError, err) + } + + if tt.expectError { + return + } + + if days != tt.expectDays { + t.Errorf("expected %d days, got %d", tt.expectDays, days) + } + if extensions != tt.expectExtensions { + t.Errorf("expected %d max extensions, got %d", tt.expectExtensions, extensions) + } + }) + } +} diff --git a/pkg/tickets/handler.go b/pkg/tickets/handler.go new file mode 100644 index 0000000..32335a4 --- /dev/null +++ b/pkg/tickets/handler.go @@ -0,0 +1,15 @@ +package tickets + +import ( + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +// Handler encapsulates all Ticket-related HTTP logic +type Handler struct { + Store domain.Store +} + +// NewHandler creates a new Tickets Handler +func NewHandler(store domain.Store) *Handler { + return &Handler{Store: store} +} diff --git a/pkg/tickets/handlers_test.go b/pkg/tickets/handlers_test.go new file mode 100644 index 0000000..3339d4f --- /dev/null +++ b/pkg/tickets/handlers_test.go @@ -0,0 +1,73 @@ +package tickets + +import ( + "bytes" + "context" + "database/sql" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "epigas.gitea.cloud/RiskRancher/core/pkg/datastore" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +func setupTestTickets(t *testing.T) (*Handler, *sql.DB) { + db := 
datastore.InitDB(":memory:") + store := datastore.NewSQLiteStore(db) + return NewHandler(store), db +} + +// GetVIPCookie creates a dummy Sheriff user and an active session, +func GetVIPCookie(store domain.Store) *http.Cookie { + + user, err := store.GetUserByEmail(context.Background(), "vip_test@RiskRancher.com") + if err != nil { + user, _ = store.CreateUser(context.Background(), "vip_test@RiskRancher.com", "Test VIP", "hash", "Sheriff") + } + + token := "vip_test_token_999" + store.CreateSession(context.Background(), token, user.ID, time.Now().Add(1*time.Hour)) + + return &http.Cookie{ + Name: "session_token", + Value: token, + } +} + +func TestCreateSingleTicket(t *testing.T) { + app, db := setupTestTickets(t) + defer db.Close() + + payload := []byte(`{ + "title": "Manual Pentest Finding: XSS", + "description": "Found reflected XSS on the search page.", + "recommended_remediation": "Sanitize user input.", + "severity": "High" + }`) + + req := httptest.NewRequest(http.MethodPost, "/api/tickets", bytes.NewBuffer(payload)) + req.AddCookie(GetVIPCookie(app.Store)) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + + app.HandleCreateTicket(rr, req) + + if status := rr.Code; status != http.StatusCreated { + t.Fatalf("Expected status %v, got %v. 
Body: %s", http.StatusCreated, status, rr.Body.String()) + } + + var createdTicket domain.Ticket + if err := json.NewDecoder(rr.Body).Decode(&createdTicket); err != nil { + t.Fatalf("Failed to decode JSON response: %v", err) + } + + if createdTicket.ID == 0 { + t.Errorf("Expected database to generate an ID") + } + if createdTicket.DedupeHash == "" { + t.Errorf("Expected engine to generate a dedupe hash") + } +} diff --git a/pkg/tickets/tickets.go b/pkg/tickets/tickets.go new file mode 100644 index 0000000..8aab667 --- /dev/null +++ b/pkg/tickets/tickets.go @@ -0,0 +1,74 @@ +package tickets + +import ( + "encoding/json" + "net/http" + "strconv" + + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" +) + +type InlineUpdateRequest struct { + Severity string `json:"severity"` + Comment string `json:"comment"` + Description string `json:"description"` + RecommendedRemediation string `json:"recommended_remediation"` + Actor string `json:"actor"` + Status string `json:"status"` + Assignee string `json:"assignee"` +} + +type BulkUpdateRequest struct { + TicketIDs []int `json:"ticket_ids"` + Status string `json:"status"` + Comment string `json:"comment"` + Assignee string `json:"assignee"` + Actor string `json:"actor"` +} + +type MagistrateReviewRequest struct { + Action string `json:"action"` + Actor string `json:"actor"` + Comment string `json:"comment"` + ExtensionDays int `json:"extension_days"` +} + +func (h *Handler) HandleUpdateTicket(w http.ResponseWriter, r *http.Request) { + id, _ := strconv.Atoi(r.PathValue("id")) + var req InlineUpdateRequest + json.NewDecoder(r.Body).Decode(&req) + + if err := h.Store.UpdateTicketInline(r.Context(), id, req.Severity, req.Description, req.RecommendedRemediation, req.Comment, req.Actor, req.Status, req.Assignee); err != nil { + http.Error(w, "Database error", http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusOK) +} + +// HandleGetTickets fetches a list of tickets via the API +func (h *Handler) 
HandleGetTickets(w http.ResponseWriter, r *http.Request) { + tickets, err := h.Store.GetTickets(r.Context()) + if err != nil { + http.Error(w, "Database error", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(tickets) +} + +// HandleCreateTicket creates a single ticket via the API +func (h *Handler) HandleCreateTicket(w http.ResponseWriter, r *http.Request) { + var t domain.Ticket + if err := json.NewDecoder(r.Body).Decode(&t); err != nil { + http.Error(w, "Invalid JSON payload", http.StatusBadRequest) + return + } + + if err := h.Store.CreateTicket(r.Context(), &t); err != nil { + http.Error(w, "Failed to create ticket", http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(t) +} diff --git a/ui/static/admin.js b/ui/static/admin.js new file mode 100644 index 0000000..627098c --- /dev/null +++ b/ui/static/admin.js @@ -0,0 +1,147 @@ +window.showUpsell = function(featureName) { + document.getElementById('upsellFeatureName').innerText = featureName; + document.getElementById('upsellModal').style.display = 'flex'; +}; + +function switchTab(tabId, btnElement) { + document.querySelectorAll('.tab-pane').forEach(pane => pane.classList.remove('active')); + document.querySelectorAll('.tab-btn').forEach(btn => btn.classList.remove('active')); + document.getElementById(tabId).classList.add('active'); + if(btnElement) btnElement.classList.add('active'); +} + + +// --- CONFIG & USERS LOGIC --- +window.deleteUser = async function(id) { if(confirm("Deactivate this user?")) await fetch(`/api/admin/users/${id}`, { method: 'DELETE' }).then(r => r.ok ? 
window.location.reload() : alert("Failed")); } +window.editRole = async function(id, currentRole) { + const newRole = prompt("Enter new role (RangeHand, Wrangler, Magistrate, Sheriff):", currentRole); + if(newRole && newRole !== currentRole) await fetch(`/api/admin/users/${id}/role`, { method: 'PATCH', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ global_role: newRole }) }).then(r => r.ok ? window.location.reload() : alert("Failed")); +} +window.resetPassword = async function(id) { if(confirm("Generate new password?")) await fetch(`/api/admin/users/${id}/reset-password`, { method: 'PATCH' }).then(async r => r.ok ? alert("New Password: \n\n" + await r.text()) : alert("Failed")); } +window.deleteRule = async function(id) { if(confirm("Delete rule?")) await fetch(`/api/admin/routing/${id}`, { method: 'DELETE' }).then(r => r.ok ? window.location.reload() : alert("Failed")); } +window.updateBackupPolicy = async function() { const pol = document.getElementById("backupPolicy").value; await fetch(`/api/admin/backup-policy`, { method: 'PUT', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ policy: pol }) }).then(r => r.ok ? alert("Saved") : alert("Failed")); } +window.checkUpdates = async function() { await fetch(`/api/admin/check-updates`).then(async r => alert(await r.text())); } + +document.addEventListener("DOMContentLoaded", function() { + + // --- LOGS ENGINE --- + let currentLogPage = 1; + async function loadLogs() { + const filter = document.getElementById("logFilter").value; + const container = document.getElementById("logContainer"); + container.innerHTML = `
โณ Fetching Page ${currentLogPage}...
`; + try { + const res = await fetch(`/api/admin/logs?page=${currentLogPage}&filter=${filter}`); + if (!res.ok) throw new Error(`HTTP ${res.status}`); + const data = await res.json(); + container.innerHTML = ""; + if (!data.feed || data.feed.length === 0) container.innerHTML = `

No activity found.

`; + else { + data.feed.forEach(item => { + const badgeStr = item.NewValue ? `${item.NewValue}` : ""; + container.innerHTML += `
${item.Actor}[${item.ActivityType.replace('_', ' ')}]
โฑ๏ธ ${item.TimeAgo}

${badgeStr}
`; + }); + } + const totalPages = Math.ceil(data.total / data.limit); + document.getElementById("logPageInfo").innerText = `Showing page ${data.page} of ${totalPages || 1} (Total: ${data.total})`; + document.getElementById("logPrevBtn").disabled = data.page <= 1; + document.getElementById("logNextBtn").disabled = data.page >= totalPages; + } catch (err) { container.innerHTML = `

๐Ÿšจ Error: ${err.message}

`; } + } + + const logFilter = document.getElementById("logFilter"); + if(logFilter) { + logFilter.addEventListener("change", () => { currentLogPage = 1; loadLogs(); }); + document.getElementById("logPrevBtn").addEventListener("click", () => { if(currentLogPage > 1) { currentLogPage--; loadLogs(); } }); + document.getElementById("logNextBtn").addEventListener("click", () => { currentLogPage++; loadLogs(); }); + loadLogs(); + } + + // --- UI INITIALIZERS --- + document.querySelectorAll('.risk-row').forEach(row => { + const rationaleDiv = row.querySelector('.risk-rationale-cell'); + const typeCell = row.querySelector('.risk-type-cell'); + if (!rationaleDiv || !typeCell) return; + let text = rationaleDiv.innerText.trim(); + if (text.includes('[EXTENSION]')) { + typeCell.innerHTML = 'โฑ๏ธ TIME EXTENSION'; + rationaleDiv.innerText = text.replace('[EXTENSION]', '').trim(); + rationaleDiv.style.borderLeft = "3px solid #ea580c"; + } else if (text.includes('[RISK ACCEPTANCE]')) { + typeCell.innerHTML = '๐Ÿ›‘ RISK ACCEPTANCE'; + rationaleDiv.innerText = text.replace('[RISK ACCEPTANCE]', '').trim(); + rationaleDiv.style.borderLeft = "3px solid #dc2626"; + row.style.backgroundColor = "#fff5f5"; + } else { + typeCell.innerHTML = '๐Ÿ“‹ STANDARD'; + } + }); + + + // --- SLA MATRIX SAVE --- + const saveConfigBtn = document.getElementById("saveConfigBtn"); + if(saveConfigBtn) { + saveConfigBtn.addEventListener("click", async function() { + this.innerText = "Saving..."; this.disabled = true; + const payload = { + timezone: document.getElementById("configTimezone").value, + business_start: parseInt(document.getElementById("configBizStart").value), + business_end: parseInt(document.getElementById("configBizEnd").value), + default_extension_days: parseInt(document.getElementById("configDefExt").value), + slas: Array.from(document.querySelectorAll(".sla-row")).map(row => ({ + domain: row.getAttribute("data-domain"), + severity: row.querySelector("span.badge").innerText.trim(), + 
days_to_triage: parseInt(row.querySelector(".sla-triage").value), + days_to_remediate: parseInt(row.querySelector(".sla-patch").value), + max_extensions: parseInt(row.querySelector(".sla-ext").value) + })) + }; + const res = await fetch("/api/config", { method: "PUT", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) }); + if (res.ok) { this.innerText = "Saved!"; this.style.background = "#10b981"; setTimeout(() => { this.innerText = "Save Changes"; this.style.background = ""; this.disabled = false; }, 2000); } + else { alert("Failed"); this.innerText = "Save Changes"; this.disabled = false; } + }); + } + + // SLA Domain Filter + const domainFilter = document.getElementById("slaDomainFilter"); + if (domainFilter) { + domainFilter.addEventListener("change", function() { + document.querySelectorAll(".sla-row").forEach(row => row.style.display = row.getAttribute("data-domain") === this.value ? "table-row" : "none"); + }); + domainFilter.dispatchEvent(new Event("change")); + } + + // --- MODAL EVENT LISTENERS --- + const openUserModal = document.getElementById("openUserModal"); + if (openUserModal) { + openUserModal.addEventListener("click", () => document.getElementById("userModal").style.display = "flex"); + document.getElementById("cancelUser").addEventListener("click", () => document.getElementById("userModal").style.display = "none"); + document.getElementById("submitUser").addEventListener("click", async function() { + const payload = { full_name: document.getElementById("newUserName").value, email: document.getElementById("newUserEmail").value, password: document.getElementById("newUserPassword").value, global_role: document.getElementById("newUserRole").value }; + if (!payload.full_name || !payload.email || !payload.password) return alert("Fill out all fields."); + this.disabled = true; + await fetch("/api/admin/users", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) }).then(async r => 
r.ok ? window.location.reload() : alert(await r.text())); + this.disabled = false; + }); + } + + const newRuleType = document.getElementById("newRuleType"); + if (newRuleType) { + newRuleType.addEventListener("change", function() { + document.getElementById("newRuleMatchSource").style.display = this.value === "Source" ? "block" : "none"; + document.getElementById("newRuleMatchAsset").style.display = this.value === "Source" ? "none" : "block"; + }); + document.getElementById("openRuleModal").addEventListener("click", () => document.getElementById("ruleModal").style.display = "flex"); + document.getElementById("cancelRule").addEventListener("click", () => document.getElementById("ruleModal").style.display = "none"); + document.getElementById("submitRule").addEventListener("click", async function() { + const ruleType = document.getElementById("newRuleType").value; + const matchVal = ruleType === "Source" ? document.getElementById("newRuleMatchSource").value : document.getElementById("newRuleMatchAsset").value; + const assigneeSelect = document.getElementById("newRuleAssignee"); + const selectedEmails = Array.from(assigneeSelect.selectedOptions).map(opt => opt.value).join(","); + if (!matchVal || !selectedEmails) return alert("Fill out match value and assignee."); + this.disabled = true; this.innerText = "Saving..."; + await fetch("/api/admin/routing", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ rule_type: ruleType, match_value: matchVal, assignee: selectedEmails, role: "RangeHand" }) }).then(async r => r.ok ? 
window.location.reload() : alert(await r.text())); + this.disabled = false; this.innerText = "Deploy Rule"; + }); + } +}); \ No newline at end of file diff --git a/ui/static/auth.js b/ui/static/auth.js new file mode 100644 index 0000000..6dbab17 --- /dev/null +++ b/ui/static/auth.js @@ -0,0 +1,52 @@ +document.addEventListener("DOMContentLoaded", () => { + + // --- LOGIN LOGIC --- + const loginForm = document.getElementById("loginForm"); + if (loginForm) { + loginForm.addEventListener("submit", async (e) => { + e.preventDefault(); + const btn = document.getElementById("submitBtn"); + const errDiv = document.getElementById("errorMsg"); + + btn.innerText = "Authenticating..."; btn.disabled = true; errDiv.style.display = "none"; + + try { + const res = await fetch("/api/auth/login", { + method: "POST", headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ email: document.getElementById("email").value, password: document.getElementById("password").value }) + }); + + if (res.ok) window.location.href = "/dashboard"; + else { errDiv.innerText = "Invalid credentials. 
Please try again."; errDiv.style.display = "block"; btn.innerText = "Sign In"; btn.disabled = false; } + } catch (err) { errDiv.innerText = "Network error."; errDiv.style.display = "block"; btn.innerText = "Sign In"; btn.disabled = false; } + }); + } + + // --- REGISTER LOGIC --- + const registerForm = document.getElementById("registerForm"); + if (registerForm) { + registerForm.addEventListener("submit", async (e) => { + e.preventDefault(); + const btn = document.getElementById("submitBtn"); + const errDiv = document.getElementById("errorMsg"); + + btn.innerText = "Securing System..."; btn.disabled = true; errDiv.style.display = "none"; + + const payload = { + full_name: document.getElementById("fullname").value, email: document.getElementById("email").value, + password: document.getElementById("password").value, global_role: "Sheriff" + }; + + try { + const res = await fetch("/api/auth/register", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) }); + if (res.ok) { + const loginRes = await fetch("/api/auth/login", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ email: payload.email, password: payload.password }) }); + if (loginRes.ok) window.location.href = "/dashboard"; else window.location.href = "/login"; + } else { + errDiv.innerText = await res.text() || "Registration failed. 
System might already be locked."; + errDiv.style.display = "block"; btn.innerText = "Claim Sheriff Access"; btn.disabled = false; + } + } catch (err) { errDiv.innerText = "Network error."; errDiv.style.display = "block"; btn.innerText = "Claim Sheriff Access"; btn.disabled = false; } + }); + } +}); \ No newline at end of file diff --git a/ui/static/builder.js b/ui/static/builder.js new file mode 100644 index 0000000..3c71906 --- /dev/null +++ b/ui/static/builder.js @@ -0,0 +1,198 @@ +const fileInput = document.getElementById('local-file'); +const pathInput = document.getElementById('findings_path'); +let currentRawData = null; +let isJson = false; + +fileInput.addEventListener('change', (e) => { + const file = e.target.files[0]; + if (!file) return; + isJson = file.name.toLowerCase().endsWith('.json'); + + const reader = new FileReader(); + reader.onload = (event) => { + currentRawData = event.target.result; + document.getElementById('preview-placeholder').style.display = 'none'; + + if (isJson) { + try { + const parsed = JSON.parse(currentRawData); + const guessedPath = autoDetectArrayPath(parsed); + if (guessedPath) { + pathInput.value = guessedPath; + } + } catch (e) { + console.error("Auto-detect failed:", e); + } + } + + processPreview(); + }; + reader.readAsText(file); +}); + +pathInput.addEventListener('input', () => { + if (currentRawData && isJson) processPreview(); +}); + +function autoDetectArrayPath(obj) { + if (Array.isArray(obj)) return "."; + + let bestPath = ""; + let maxLen = -1; + + function search(currentObj, currentPath) { + if (Array.isArray(currentObj)) { + if (currentObj.length > 0 && typeof currentObj[0] === 'object') { + if (currentObj.length > maxLen) { + maxLen = currentObj.length; + bestPath = currentPath; + } + } + return; + } + if (currentObj !== null && typeof currentObj === 'object') { + for (let key in currentObj) { + let nextPath = currentPath ? currentPath + "." 
+ key : key; + search(currentObj[key], nextPath); + } + } + } + + search(obj, ""); + return bestPath || "."; +} + + +function processPreview() { + let headers = []; + let rows = []; + + if (isJson) { + try { + const parsed = JSON.parse(currentRawData); + const findings = getNestedValue(parsed, pathInput.value); + + + if (!Array.isArray(findings) || findings.length === 0) { + const rawPreview = JSON.stringify(parsed, null, 2).substring(0, 1500) + "\n\n... (file truncated for preview)"; + + document.getElementById('preview-table-container').innerHTML = + `
+

โš ๏ธ Path "${pathInput.value}" is not an array.

+

Here is the structure of your file to help you find the correct path:

+
${rawPreview}
+
`; + + document.getElementById('save-btn').classList.add('disabled'); + return; + } + + document.getElementById('save-btn').classList.remove('disabled'); + headers = Object.keys(findings[0]); + rows = findings.slice(0, 5).map(obj => headers.map(h => formatCell(obj[h]))); + } catch(e) { + document.getElementById('preview-table-container').innerHTML = `
JSON Parse Error: ${e.message}
`; + return; + } + } else { + const lines = currentRawData.split('\n').filter(l => l.trim() !== ''); + headers = lines[0].split(',').map(h => h.trim()); + rows = lines.slice(1, 6).map(line => line.split(',').map(c => c.trim())); + document.getElementById('save-btn').classList.remove('disabled'); + } + + renderTable(headers, rows); + populateDropdowns(headers); +} + +function getNestedValue(obj, path) { + if (path === '' || path === '.') return obj; + return path.split('.').reduce((acc, part) => acc && acc[part], obj); +} + +function formatCell(val) { + if (typeof val === 'object') return JSON.stringify(val); + if (val === undefined || val === null) return ""; + const str = String(val); + return str.length > 50 ? str.substring(0, 47) + "..." : str; +} + +function renderTable(headers, rows) { + let html = ''; + html += '' + headers.map(h => ``).join('') + ''; + rows.forEach(row => { + html += '' + row.map(cell => ``).join('') + ''; + }); + html += '
${h}
${cell}
'; + document.getElementById('preview-table-container').innerHTML = html; +} + +function populateDropdowns(headers) { + const selects = document.querySelectorAll('.source-header'); + selects.forEach(select => { + select.innerHTML = ''; + headers.forEach(h => { + const opt = document.createElement('option'); + opt.value = h; + opt.textContent = h; + select.appendChild(opt); + }); + }); +} + +document.getElementById('adapter-form').onsubmit = async (e) => { + e.preventDefault(); + const data = { + name: document.getElementById('name').value, + source_name: document.getElementById('source_name').value, + findings_path: document.getElementById('findings_path').value, + mapping_title: document.getElementById('mapping_title').value, + mapping_asset: document.getElementById('mapping_asset').value, + mapping_severity: document.getElementById('mapping_severity').value, + mapping_description: document.getElementById('mapping_description').value, + mapping_remediation: document.getElementById('mapping_remediation').value + }; + + const resp = await fetch('/api/adapters', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(data) + }); + + if (resp.ok) { + alert("Adapter Saved! 
Taking you back to the Landing Zone."); + window.location.href = "/ingest"; + } else { + alert("Failed to save adapter: " + await resp.text()); + } +}; + +window.exportAdapterJSON = function() { + const name = document.getElementById("adapterName").value.trim(); + const sourceName = document.getElementById("sourceName").value.trim(); + const rootPath = document.getElementById("rootPath").value.trim(); + + if (!name || !sourceName) { + return alert("Adapter Name and Source Name are required to export."); + } + + const payload = { + name: name, + source_name: sourceName, + findings_path: rootPath, + mapping_title: document.getElementById("mapTitle").value.trim(), + mapping_asset: document.getElementById("mapAsset").value.trim(), + mapping_severity: document.getElementById("mapSeverity").value.trim(), + mapping_description: document.getElementById("mapDesc").value.trim(), + mapping_remediation: document.getElementById("mapRem").value.trim() + }; + + // Create a downloadable JSON blob + const dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(payload, null, 4)); + const downloadAnchorNode = document.createElement('a'); + downloadAnchorNode.setAttribute("href", dataStr); + downloadAnchorNode.setAttribute("download", `${sourceName.toLowerCase().replace(/\s+/g, '_')}_adapter.json`); + document.body.appendChild(downloadAnchorNode); + downloadAnchorNode.click(); + downloadAnchorNode.remove(); +}; \ No newline at end of file diff --git a/ui/static/dashboard.js b/ui/static/dashboard.js new file mode 100644 index 0000000..23033dc --- /dev/null +++ b/ui/static/dashboard.js @@ -0,0 +1,341 @@ + +window.showUpsell = function(featureName) { + const featureNameEl = document.getElementById('upsellFeatureName'); + const modalEl = document.getElementById('upsellModal'); + if (featureNameEl && modalEl) { + featureNameEl.innerText = featureName; + modalEl.style.display = 'flex'; + } else { + alert("This feature (" + featureName + ") is available in RiskRancher 
Pro!"); + } +}; + + +window.renderMarkdown = function(text) { + if (!text) return "No description provided."; + let html = text.replace(/!\[.*?\]\((.*?)\)/g, '

'); + html = html.replace(/\n/g, '
'); + return html; +}; + + +window.updateDrawerPreview = function() { + const rawDesc = document.getElementById('drawerDescEdit').value; + document.getElementById('drawerDescPreview').innerHTML = renderMarkdown(rawDesc); +}; + + +window.openDrawer = function(id, title, asset, severity) { + document.getElementById('drawerTicketID').value = id; + document.getElementById('drawerTitle').innerText = title; + document.getElementById('drawerAsset').innerText = asset; + + const badge = document.getElementById('drawerBadge'); + badge.innerText = severity; + badge.className = `badge ${severity.toLowerCase()}`; + + document.getElementById('drawerSeverity').value = severity; + document.getElementById('drawerComment').value = ""; + + const rawDesc = document.getElementById('desc-' + id) ? document.getElementById('desc-' + id).value : ""; + const rawRem = document.getElementById('rem-' + id) ? document.getElementById('rem-' + id).value : ""; + const rawEv = document.getElementById('ev-' + id) ? document.getElementById('ev-' + id).value : ""; + const status = document.getElementById('status-' + id) ? document.getElementById('status-' + id).value : ""; + const rawComment = document.getElementById('comment-' + id) ? document.getElementById('comment-' + id).value : ""; + const assignee = document.getElementById('assignee-' + id) ? document.getElementById('assignee-' + id).value : ""; + + document.getElementById('drawerDescEdit').value = rawDesc; + document.getElementById('drawerRemEdit').value = rawRem; + + const drawerAssignee = document.getElementById('drawerAssignee'); + if (drawerAssignee) { + drawerAssignee.value = (assignee === "Unassigned") ? 
"" : assignee; + } + + const evBlock = document.getElementById('drawerEvidenceBlock'); + const evText = document.getElementById('drawerEvidenceText'); + if (evBlock && evText) { + if (rawEv && rawEv.trim() !== "") { + evText.innerText = rawEv; + evBlock.style.display = "block"; + } else { + evBlock.style.display = "none"; + evText.innerText = ""; + } + } + + const retBlock = document.getElementById('drawerReturnedBlock'); + const retText = document.getElementById('drawerReturnedText'); + if (retBlock && retText) { + if (status === 'Returned to Security' && rawComment) { + retText.innerText = rawComment; + retBlock.style.display = "block"; + } else { + retBlock.style.display = "none"; + retText.innerText = ""; + } + } + + const standardActions = document.getElementById('drawerStandardActions'); + const editControls = document.getElementById('drawerEditControls'); + + if (window.CurrentTab === 'archives') { + if(standardActions) standardActions.style.display = 'none'; + if(editControls) editControls.style.display = 'none'; + } else { + if(standardActions) standardActions.style.display = 'flex'; + if(editControls) editControls.style.display = 'block'; + } + + updateDrawerPreview(); + + document.getElementById('ticketDrawer').style.width = '600px'; + document.getElementById('ticketDrawer').classList.add('open'); + document.getElementById('drawerOverlay').style.display = 'block'; +}; + +window.closeDrawer = function() { + document.getElementById('ticketDrawer').classList.remove('open'); + document.getElementById('drawerOverlay').style.display = 'none'; +}; + +window.openNewTicketModal = function() { + // Clear out old values just in case + document.getElementById('newTicketTitle').value = ''; + document.getElementById('newTicketAsset').value = ''; + document.getElementById('newTicketDesc').value = ''; + document.getElementById('newTicketSeverity').value = 'High'; + + document.getElementById('newTicketModal').style.display = 'flex'; +}; + +window.submitNewTicket = async 
function() { + const title = document.getElementById('newTicketTitle').value.trim(); + const asset = document.getElementById('newTicketAsset').value.trim(); + const severity = document.getElementById('newTicketSeverity').value; + const desc = document.getElementById('newTicketDesc').value.trim(); + + if (!title || !asset) { + return alert("Title and Asset Identifier are required!"); + } + + const payload = { + title: title, + asset_identifier: asset, + severity: severity, + description: desc, + source: "Manual", + status: "Waiting to be Triaged" + }; + + try { + const res = await fetch('/api/tickets', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload) + }); + + if (res.ok) { + window.location.reload(); + } else { + alert("Failed to create ticket."); + } + } catch (err) { + alert("Network error."); + } +}; + +window.toggleAssetGroup = function(safeAsset) { + document.querySelectorAll(`.group-${safeAsset}`).forEach(r => { + r.style.display = r.style.display === "none" ? "table-row" : "none"; + }); +}; + +function initializeAssetTree() { + const tbody = document.getElementById("ticketTableBody"); + if (!tbody) return; + + const rows = Array.from(tbody.querySelectorAll("tr.ticket-row")); + if (rows.length === 0) { + document.getElementById("mainTableHeader").style.display = "table-header-group"; + tbody.innerHTML = `No tickets found in this queue. The ranch is quiet! 
๐Ÿค `; + return; + } + + const assets = {}; + rows.forEach(r => { + const asset = r.getAttribute("data-asset") || "Unknown"; + if (!assets[asset]) assets[asset] = []; + assets[asset].push(r); + }); + + tbody.innerHTML = ""; + + for (const asset in assets) { + const findings = assets[asset]; + const safeAsset = asset.replace(/[^a-zA-Z0-9-_]/g, '-'); + + let overdueCount = 0; + let counts = { Critical: 0, High: 0, Medium: 0, Low: 0, Info: 0 }; + + findings.forEach(r => { + const sev = r.querySelector('.badge').innerText.trim(); + if (counts[sev] !== undefined) counts[sev]++; + const triageTimerSpan = r.querySelector('.triage-timer'); + if (triageTimerSpan) { + const dueStr = triageTimerSpan.getAttribute('data-due'); + if (dueStr) { + const due = new Date(dueStr); + if (Math.ceil((due - new Date()) / (1000 * 60 * 60 * 24)) < 0) overdueCount++; + } + } else if (r.querySelector('span[style*="color: #dc2626"]')) { + overdueCount++; + } + }); + + let badges = ''; + if (counts.Critical > 0) badges += `${counts.Critical} C`; + if (counts.High > 0) badges += `${counts.High} H`; + if (counts.Medium > 0) badges += `${counts.Medium} M`; + if (counts.Low > 0) badges += `${counts.Low} L`; + if (overdueCount > 0) badges += `overdue:${overdueCount}`; + + let shareButtonHtml = ''; + if (window.CurrentTab === 'chute') { + shareButtonHtml = ``; + } else if (window.CurrentTab === 'holding_pen') { + shareButtonHtml = `Assign out to share`; + } + + const headerTr = document.createElement("tr"); + headerTr.className = "asset-header-row"; + headerTr.innerHTML = ` + + + ๐Ÿ“‚ ${asset} + (${findings.length}) ${badges} + + ${shareButtonHtml} + `; + tbody.appendChild(headerTr); + + const assetDetailsTr = document.createElement("tr"); + assetDetailsTr.className = `group-${safeAsset}`; + assetDetailsTr.style.display = "none"; + assetDetailsTr.innerHTML = ` + +
+
+
+
+ + `; + tbody.appendChild(assetDetailsTr); + + const innerTableBody = assetDetailsTr.querySelector('tbody'); + findings.forEach(r => { + r.style.boxShadow = "0 1px 2px rgba(0,0,0,0.05)"; + const cells = r.querySelectorAll('td'); + if (cells.length >= 6) { cells[1].style.width = "120px"; cells[2].style.width = "100px"; cells[4].style.width = "160px"; cells[5].style.width = "160px"; } + innerTableBody.appendChild(r); + }); + + headerTr.querySelector('.asset-cb').addEventListener('change', function() { + const isChecked = this.checked; + innerTableBody.querySelectorAll('.ticket-cb').forEach(cb => cb.checked = isChecked); + }); + } +} + +document.addEventListener("DOMContentLoaded", function() { + window.markFalsePositive = async function() { + const id = parseInt(document.getElementById("drawerTicketID").value); + const comment = document.getElementById("drawerComment").value; + if (!comment.trim()) return alert("An audit trail comment is strictly required."); + + const btn = document.querySelector('button[onclick="markFalsePositive()"]'); + if (btn) { + btn.innerText = "Processing..."; + btn.disabled = true; + } + + try { + const res = await fetch(`/api/tickets/${id}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + status: "False Positive", + comment: "[False Positive] " + comment, + actor: "Analyst" + }) + }); + if (res.ok) { + window.location.reload(); + } else { + alert("Failed."); + if (btn) { + btn.innerText = "๐Ÿšซ Mark False Positive"; + btn.disabled = false; + } + } + } catch (err) { + alert("Network error."); + if (btn) btn.disabled = false; + } + }; + + document.querySelectorAll('.triage-timer').forEach(el => { + const dueStr = el.getAttribute('data-due'); + if (!dueStr) return; + const diffDays = Math.ceil((new Date(dueStr) - new Date()) / (1000 * 60 * 60 * 24)); + const baseStyle = "display: inline-block; white-space: nowrap; padding: 4px 10px; border-radius: 12px; font-size: 0.8rem; font-weight: 
bold;"; + if (diffDays < 0) el.innerHTML = `Overdue by ${Math.abs(diffDays)}d`; + else if (diffDays === 0) el.innerHTML = `Due Today`; + else el.innerHTML = `${diffDays} days left`; + }); + + initializeAssetTree(); + + const drawerSubmitBtn = document.getElementById("drawerSubmitBtn"); + if(drawerSubmitBtn) { + drawerSubmitBtn.addEventListener("click", async function() { + const id = document.getElementById("drawerTicketID").value; + const newSev = document.getElementById("drawerSeverity").value; + const comment = document.getElementById("drawerComment").value; + const newDesc = document.getElementById("drawerDescEdit").value; + const newRem = document.getElementById("drawerRemEdit").value; + + const assigneeInput = document.getElementById("drawerAssignee"); + const newAssignee = assigneeInput ? assigneeInput.value.trim() : ""; + const currentStatus = document.getElementById("status-" + id).value; + + let newStatus = currentStatus; + if (newAssignee !== "" && newAssignee !== "Unassigned") { + newStatus = "Assigned Out"; + } else if (currentStatus === "Returned to Security") { + newStatus = "Waiting to be Triaged"; + } + + if (!comment.trim()) return alert("An audit trail comment is strictly required when modifying a finding."); + + this.innerText = "Saving..."; this.disabled = true; + try { + const res = await fetch(`/api/tickets/${id}`, { + method: 'PATCH', headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + severity: newSev, + comment: comment, + description: newDesc, + recommended_remediation: newRem, + actor: "Analyst", + status: newStatus, + assignee: newAssignee || "Unassigned" + }) + }); + + if (res.ok) window.location.reload(); + else { alert("Update failed."); this.innerText = "Save & Dispatch"; this.disabled = false; } + } catch (err) { alert("Network error."); this.disabled = false; } + }); + } +}); \ No newline at end of file diff --git a/ui/static/ingest.js b/ui/static/ingest.js new file mode 100644 index 0000000..bb6679b --- 
/dev/null +++ b/ui/static/ingest.js @@ -0,0 +1,68 @@ +const dropZone = document.getElementById('drop-zone'); +const fileInput = document.getElementById('file-input'); + +async function processFile(file) { + const statusText = document.getElementById('status-text'); + document.getElementById('status-area').classList.remove('d-none'); + + const adapterSelect = document.getElementById('adapter-select'); + const adapterId = adapterSelect.value; + + let adapterName = ""; + if (adapterSelect.selectedIndex > 0) { + adapterName = adapterSelect.options[adapterSelect.selectedIndex].getAttribute('data-name'); + } + + try { + let response; + + if (file.name.toLowerCase().endsWith('.json')) { + if (!adapterName) { + statusText.innerText = "Unknown JSON format. Redirecting to Adapter Builder..."; + setTimeout(() => { + window.location.href = `/admin/adapters/new?filename=${encodeURIComponent(file.name)}`; + }, 1200); + return; + } + + const rawText = await file.text(); + response = await fetch(`/api/ingest/${encodeURIComponent(adapterName)}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: rawText + }); + } + else { + let formData = new FormData(); + formData.append('file', file); + if (adapterId) { + formData.append('adapter_id', adapterId); + } + + response = await fetch('/api/ingest/csv', { method: 'POST', body: formData }); + } + + if (!response.ok) { + if (response.status === 404) { + statusText.innerText = "Format not recognized. Redirecting to Adapter Builder..."; + setTimeout(() => { + window.location.href = `/admin/adapters/new?filename=${encodeURIComponent(file.name)}`; + }, 1200); + } else { + const errText = await response.text(); + throw new Error(errText); + } + } else { + statusText.innerText = "Yeehaw! Tickets corralled successfully."; + setTimeout(() => window.location.href = "/dashboard", 800); + } + } catch (err) { + statusText.innerText = "Stampede! 
Error: " + err.message; + } +} + +dropZone.onclick = () => fileInput.click(); +fileInput.onchange = (e) => processFile(e.target.files[0]); +dropZone.ondragover = (e) => { e.preventDefault(); dropZone.style.background = "#e1f5fe"; }; +dropZone.ondragleave = () => dropZone.style.background = "#f8f9fa"; +dropZone.ondrop = (e) => { e.preventDefault(); processFile(e.dataTransfer.files[0]); }; \ No newline at end of file diff --git a/ui/static/parser.js b/ui/static/parser.js new file mode 100644 index 0000000..908e5cc --- /dev/null +++ b/ui/static/parser.js @@ -0,0 +1,282 @@ + +const reportID = window.location.pathname.split("/").pop(); +const clipBtn = document.getElementById('clip-btn'); +const viewer = document.getElementById('document-viewer'); + +window.activeTextarea = null; +document.addEventListener('focusin', function(e) { + if (e.target && e.target.classList.contains('draft-desc')) { + window.activeTextarea = e.target; + } +}); + +viewer.addEventListener('mouseup', function(e) { + let selection = window.getSelection(); + let text = selection.toString().trim(); + + if (text.length > 5) { + clipBtn.style.top = `${e.pageY - 50}px`; + clipBtn.style.left = `${e.pageX - 60}px`; + clipBtn.style.display = 'block'; + + clipBtn.onclick = async () => { + await saveNewDraft(text); + clipBtn.style.display = 'none'; + selection.removeAllRanges(); + }; + } else { + clipBtn.style.display = 'none'; + } +}); + +document.addEventListener('mousedown', (e) => { + if (e.target !== clipBtn && !viewer.contains(e.target)) { + clipBtn.style.display = 'none'; + } +}); + +viewer.addEventListener('click', async function(e) { + if (e.target.tagName === 'IMG' && e.target.classList.contains('pentest-img')) { + + const originalBorder = e.target.style.border; + e.target.style.transition = "border 0.2s, transform 0.2s"; + e.target.style.border = "4px solid #f59e0b"; + e.target.style.transform = "scale(0.98)"; + + try { + const uploadRes = await fetch('/api/images/upload', { + method: 'POST', + 
headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ image_data: e.target.src }) + }); + + if (!uploadRes.ok) throw new Error("Failed to upload image"); + const data = await uploadRes.json(); + const markdownImage = `![Proof of Concept](${data.url})`; + + if (window.activeTextarea) { + const start = window.activeTextarea.selectionStart; + const end = window.activeTextarea.selectionEnd; + const text = window.activeTextarea.value; + + window.activeTextarea.value = text.substring(0, start) + `\n${markdownImage}\n` + text.substring(end); + + const draftId = window.activeTextarea.getAttribute('data-id'); + updateLivePreview(draftId); + updateDraftField(draftId); + + } else { + if (confirm("๐Ÿ“ธ Extract this screenshot into a BRAND NEW finding?\n\n(Tip: To add it to an existing finding, just click inside its Description box first!)")) { + await saveNewDraft(markdownImage); + } + } + + e.target.style.border = "4px solid #10b981"; + setTimeout(() => { + e.target.style.border = originalBorder; + e.target.style.transform = "scale(1)"; + }, 800); + + } catch (err) { + console.error(err); + e.target.style.border = "4px solid #ef4444"; + alert("Error extracting image: " + err.message); + } + } +}); + +async function saveNewDraft(text) { + try { + const res = await fetch(`/api/drafts/report/${reportID}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ description: text }) + }); + if (res.ok) loadDrafts(); + else alert("Failed to save draft: " + await res.text()); + } catch (err) { + alert("Network error saving draft."); + } +} + +window.updateDraftField = function(id) { + const card = document.querySelector(`.draft-card[data-id="${id}"]`); + if (!card) return; + + const payload = { + title: card.querySelector('.draft-title').value, + asset_identifier: card.querySelector('.draft-asset').value, + severity: card.querySelector('.draft-severity').value, + description: card.querySelector('.draft-desc').value, + 
recommended_remediation: card.querySelector('.draft-remediation').value + }; + + fetch(`/api/drafts/${id}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload) + }).catch(e => console.error("Auto-save failed", e)); +} + +window.renderMarkdown = function(text) { + if (!text) return ""; + let html = text.replace(/!\[.*?\]\((.*?)\)/g, '

'); + return html; +} + +window.updateLivePreview = function(id) { + const card = document.querySelector(`.draft-card[data-id="${id}"]`); + if (!card) return; + const desc = card.querySelector('.draft-desc').value; + const preview = document.getElementById(`preview-${id}`); + + if (desc.includes('![')) { + preview.style.display = 'block'; + preview.innerHTML = renderMarkdown(desc); + } else { + preview.style.display = 'none'; + } +} + +async function loadDrafts() { + try { + const res = await fetch(`/api/drafts/report/${reportID}`); + if (!res.ok) return; + const drafts = await res.json(); + + const list = document.getElementById('draft-list'); + if (!drafts || drafts.length === 0) { + list.innerHTML = `
No drafts yet.

Highlight text on the left to begin clipping.
`; + return; + } + + let html = ''; + drafts.forEach(d => { + html += ` +
+
+ + +
+
+ +
+
+ + + +
+ ${renderMarkdown(d.description || '')} +
+
+
+ +
+
+ + +
+
`; + }); + list.innerHTML = html; + } catch (err) { + console.error("Failed to load drafts", err); + } +} + +window.deleteDraft = async function(id) { + if (!confirm("Discard this finding?")) return; + try { + const res = await fetch(`/api/drafts/${id}`, { method: 'DELETE' }); + if (res.ok) loadDrafts(); + } catch (err) { + alert("Error discarding draft."); + } +} + +window.promoteAllDrafts = async function() { + const cards = document.querySelectorAll('.draft-card'); + if (cards.length === 0) return alert("No drafts to promote!"); + + const payload = []; + let hasError = false; + + cards.forEach(card => { + const id = parseInt(card.getAttribute('data-id')); + const titleInput = card.querySelector('.draft-title'); + const assetInput = card.querySelector('.draft-asset'); + + const title = titleInput.value.trim(); + const asset = assetInput.value.trim(); + const severity = card.querySelector('.draft-severity').value; + const description = card.querySelector('.draft-desc').value.trim(); + const remediation = card.querySelector('.draft-remediation').value.trim(); + + if (!title || !asset) { + if (!title) titleInput.style.borderColor = '#dc2626'; + if (!asset) assetInput.style.borderColor = '#dc2626'; + hasError = true; + } + + payload.push({ id, title, asset_identifier: asset, severity, description, recommended_remediation: remediation }); + }); + + if (hasError) return alert("๐Ÿšจ Title and Asset Identifier are required."); + + try { + const res = await fetch(`/api/reports/promote/${reportID}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload) + }); + + if (res.ok) { + alert("๐Ÿค  Yeehaw! 
Findings promoted to your Holding Pen."); + window.location.href = "/dashboard"; + } else { + alert("Failed to promote: " + await res.text()); + } + } catch (err) { + alert("Network error during promotion."); + } +} + +window.smartSnap = function(btnElement) { + const viewer = document.getElementById('document-viewer'); + const fullText = decodeURIComponent(btnElement.getAttribute('data-text')); + + const searchStr = fullText.split('\n')[0].substring(0, 30).trim(); + if (!searchStr || searchStr.startsWith("![")) return alert("Cannot snap to image blocks."); + + const paragraphs = viewer.getElementsByTagName('p'); + let foundElement = null; + + for (let p of paragraphs) { + if (p.innerText.length > 50 && p.innerText.includes(searchStr)) { + foundElement = p; + break; + } + } + + if (foundElement) { + foundElement.scrollIntoView({ behavior: "smooth", block: "center" }); + const originalBg = foundElement.style.backgroundColor; + foundElement.style.transition = "background-color 0.4s"; + foundElement.style.backgroundColor = "#bfdbfe"; + setTimeout(() => foundElement.style.backgroundColor = originalBg, 1200); + + const originalText = btnElement.innerText; + btnElement.innerText = "๐ŸŽฏ Snapped!"; + setTimeout(() => btnElement.innerText = originalText, 1500); + } else { + alert("Could not locate the exact paragraph body in the document."); + } +} + +document.addEventListener("DOMContentLoaded", loadDrafts); \ No newline at end of file diff --git a/ui/static/style.css b/ui/static/style.css new file mode 100644 index 0000000..f10159c --- /dev/null +++ b/ui/static/style.css @@ -0,0 +1,409 @@ +:root { + --bg-color: #f4f4f9; + --card-bg: #ffffff; + --text-main: #333333; + --text-muted: #6b7280; + --primary: #2563eb; + --primary-hover: #1d4ed8; + --border: #e5e7eb; + --critical: #ef4444; + --high: #f97316; + --medium: #eab308; + --low: #3b82f6; +} + +* { + box-sizing: border-box; + margin: 0; + padding: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, 
Helvetica, Arial, sans-serif; +} + +body { + background-color: var(--bg-color); + color: var(--text-main); + line-height: 1.6; +} + +.container { + max-width: 1200px; + margin: 0 auto; + padding: 20px; +} + +header { + background-color: var(--card-bg); + padding: 20px; + border-bottom: 1px solid var(--border); + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 30px; + box-shadow: 0 1px 3px rgba(0,0,0,0.05); +} + +h1, h2, h3 { + color: var(--text-main); +} + +.card { + background-color: var(--card-bg); + border-radius: 8px; + padding: 20px; + box-shadow: 0 1px 3px rgba(0,0,0,0.1); + border: 1px solid var(--border); +} + +/* A simple, clean table for our vulnerabilities */ +table { + width: 100%; + border-collapse: collapse; + margin-top: 15px; +} + +th, td { + padding: 12px; + text-align: left; + border-bottom: 1px solid var(--border); +} + +th { + background-color: #f9fafb; + color: var(--text-muted); + font-weight: 600; + text-transform: uppercase; + font-size: 0.85rem; +} + +.badge { + padding: 4px 8px; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: bold; + color: white; +} + +.badge.critical { background-color: var(--critical); } +.badge.high { background-color: var(--high); } +.badge.medium { background-color: var(--medium); color: #000; } +.badge.low { background-color: var(--low); } + + +/* File: RiskRancher/ui/static/style.css (Append to bottom) */ + +/* The Tab Navigation Bar */ +.tabs { + display: flex; + border-bottom: 1px solid var(--border); + margin-bottom: 20px; + gap: 20px; +} + +.tab-link { + padding: 10px 5px; + text-decoration: none; + color: var(--text-muted); + font-weight: 600; + border-bottom: 3px solid transparent; + transition: all 0.2s ease; +} + +.tab-link:hover { + color: var(--primary); +} + +.tab-link.active { + color: var(--primary); + border-bottom: 3px solid var(--primary); +} + +/* Pagination Controls */ +.pagination { + display: flex; + justify-content: space-between; + 
align-items: center; + margin-top: 20px; + padding-top: 15px; + border-top: 1px solid var(--border); +} + +.btn { + padding: 8px 16px; + background-color: var(--card-bg); + border: 1px solid var(--border); + border-radius: 6px; + color: var(--text-main); + text-decoration: none; + font-weight: 600; + font-size: 0.85rem; + cursor: pointer; +} + +.btn:hover { + background-color: #f9fafb; +} + +.btn.disabled { + opacity: 0.5; + cursor: not-allowed; + pointer-events: none; +} + +/* Info Severity Badge */ +.badge.info { background-color: #9ca3af; } + +/* SLA Badges */ +.sla-badge { + padding: 4px 8px; + border-radius: 4px; + font-size: 0.75rem; + font-weight: bold; + border: 1px solid transparent; +} +.sla-safe { background-color: #dcfce7; color: #166534; border-color: #bbf7d0; } +.sla-warn { background-color: #fef08a; color: #854d0e; border-color: #fde047; } +.sla-breach { background-color: #fee2e2; color: #991b1b; border-color: #fecaca; } + +/* Table Checkboxes for future Bulk Actions */ +td input[type="checkbox"], th input[type="checkbox"] { + cursor: pointer; + width: 16px; + height: 16px; +} + +/* ========================================== + THE CORRAL (TABS & PILLS) + ========================================== */ + +/* The Tab Navigation */ +.nav-tabs { + display: flex; + border-bottom: 2px solid #e2e8f0; + margin-bottom: 20px; + gap: 20px; +} + +.nav-tabs a { + text-decoration: none; + color: #64748b; + font-weight: 600; + padding: 10px 5px; + border-bottom: 3px solid transparent; + transition: all 0.2s ease; +} + +.nav-tabs a:hover { + color: #0f172a; + border-bottom-color: #cbd5e1; +} + +.nav-tabs a.active { + color: #2563eb; /* A nice, sharp blue */ + border-bottom-color: #2563eb; +} + +/* The Filter Pills */ +.filter-pills { + display: flex; + gap: 12px; + margin-bottom: 24px; + flex-wrap: wrap; +} + +.pill { + display: inline-flex; + align-items: center; + text-decoration: none; + font-size: 0.9rem; + font-weight: 600; + padding: 6px 16px; + border-radius: 
999px; /* Perfect pill shape */ + background-color: #f1f5f9; + color: #475569; + border: 1px solid #cbd5e1; + transition: all 0.2s ease; +} + +.pill:hover { + background-color: #e2e8f0; +} + +.pill.active { + background-color: #1e293b; + color: white; + border-color: #1e293b; +} + +/* The Metric Badges inside the Pills */ +.pill .badge { + background-color: white; + color: #000; + border-radius: 50%; + padding: 2px 8px; + font-size: 0.8rem; + margin-left: 8px; + font-weight: bold; +} + +/* Specific Pill Colors (When Active) */ +.pill-critical.active { background-color: #dc2626; border-color: #dc2626; color: white; } +.pill-critical.active .badge { color: #dc2626; } + +.pill-overdue.active { background-color: #ea580c; border-color: #ea580c; color: white; } +.pill-overdue.active .badge { color: #ea580c; } + +.pill-mine.active { background-color: #2563eb; border-color: #2563eb; color: white; } +.pill-mine.active .badge { color: #2563eb; } + +/* ========================================== + BULK ACTION MODAL + ========================================== */ +.modal-overlay { + position: fixed; top: 0; left: 0; width: 100%; height: 100%; + background: rgba(15, 23, 42, 0.6); /* Slate backdrop */ + display: flex; align-items: center; justify-content: center; + z-index: 1000; +} + +.modal-content { + background: white; padding: 24px; border-radius: 8px; + width: 400px; box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1); +} + +.modal-content h3 { margin-top: 0; color: #0f172a; } +.modal-content label { display: block; margin-bottom: 6px; font-weight: 600; font-size: 0.9rem; color: #475569; } +.modal-content select, .modal-content textarea { + width: 100%; padding: 10px; margin-bottom: 20px; + border: 1px solid #cbd5e1; border-radius: 4px; font-family: inherit; box-sizing: border-box; +} + +.modal-actions { display: flex; justify-content: flex-end; gap: 10px; } +.btn-secondary { background-color: #e2e8f0; color: #1e293b; } +.btn-secondary:hover { background-color: #cbd5e1; } 
+.false-positive { background: #f1f5f9; color: #475569; border: 1px dashed #cbd5e1; } + +/* ========================================== + ๐Ÿš€ THE SLIDE-OUT DRAWER & MODALS + ========================================== */ +.drawer-overlay { display: none; position: fixed; top: 0; left: 0; right: 0; bottom: 0; background: rgba(15, 23, 42, 0.4); z-index: 1000; backdrop-filter: blur(2px); } +.drawer { position: fixed; top: 0; right: -650px; width: 600px; height: 100vh; background: white; box-shadow: -4px 0 25px rgba(0,0,0,0.15); transition: right 0.3s cubic-bezier(0.4, 0, 0.2, 1); z-index: 1001; display: flex; flex-direction: column; } +.drawer.open { right: 0; } +.drawer-header { padding: 25px; border-bottom: 1px solid #e2e8f0; display: flex; justify-content: space-between; align-items: flex-start; background: #f8fafc; } +.drawer-body { padding: 25px; overflow-y: auto; flex: 1; } +.drawer-footer { padding: 20px 25px; border-top: 1px solid #e2e8f0; background: #f8fafc; display: flex; justify-content: flex-end; gap: 10px; } + +/* ========================================== + ๐Ÿš€ UNIFIED TABS & TOOLBARS + ========================================== */ +.tab-nav { display: flex; gap: 4px; margin-bottom: 0; padding-left: 20px; position: relative; z-index: 2; } +.tab-btn { padding: 10px 24px; cursor: pointer; text-decoration: none; background: #f8fafc; border: 1px solid #cbd5e1; font-size: 0.95rem; color: #64748b; font-weight: 600; border-radius: 6px 6px 0 0; margin-bottom: -1px; transition: background 0.2s, color 0.2s; } +.tab-btn:hover { background: #e2e8f0; color: #0f172a; } +.tab-btn.active { background: #ffffff; color: #2563eb; border-bottom: 1px solid #ffffff; z-index: 3; } +.tab-pane { display: none; background: #ffffff; border: 1px solid #cbd5e1; border-radius: 8px; box-shadow: 0 1px 3px rgba(0,0,0,0.1); padding: 20px; position: relative; z-index: 1; min-height: 400px; } +.tab-pane.active { display: block; } +.unified-card { background: white; border: 1px solid 
#cbd5e1; border-radius: 8px; box-shadow: 0 4px 6px rgba(0,0,0,0.05); position: relative; z-index: 2; overflow: hidden; } +.toolbar { display: flex; justify-content: space-between; align-items: center; padding: 12px 20px; background: #f8fafc; border-bottom: 1px solid #e2e8f0; } + +/* ========================================== + ๐Ÿ—‚๏ธ TREE BRANCHES & SCROLLING CSS + ========================================== */ +.asset-header-row td { transition: background 0.2s; } +.asset-header-row:hover td { background: #f1f5f9 !important; } +.nested-table tr td { background: #ffffff; border-top: 1px solid #e2e8f0; border-bottom: 1px solid #e2e8f0; padding: 12px; } +.nested-table tr td:first-child { border-left: 1px solid #e2e8f0; border-top-left-radius: 8px; border-bottom-left-radius: 8px; position: relative; } +.nested-table tr td:last-child { border-right: 1px solid #e2e8f0; border-top-right-radius: 8px; border-bottom-right-radius: 8px; } +.nested-table tr td:first-child::before { content: ""; position: absolute; left: -25px; top: 50%; width: 25px; height: 3px; background: #0f172a; } +.scroll-container::-webkit-scrollbar { width: 8px; } +.scroll-container::-webkit-scrollbar-track { background: #f1f5f9; border-radius: 4px; } +.scroll-container::-webkit-scrollbar-thumb { background: #94a3b8; border-radius: 4px; } +.scroll-container::-webkit-scrollbar-thumb:hover { background: #64748b; } + +@keyframes pulse { + 0% { box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.7); } + 70% { box-shadow: 0 0 0 6px rgba(239, 68, 68, 0); } + 100% { box-shadow: 0 0 0 0 rgba(239, 68, 68, 0); } +} + +/* ========================================== + ๐Ÿ“Š KPI CARDS (Sheriff Dashboard) + ========================================== */ +.kpi-card { + display: block; + text-decoration: none; + color: inherit; + transition: transform 0.2s, box-shadow 0.2s; + border-radius: 8px; + background: white; + padding: 15px; + box-shadow: 0 1px 3px rgba(0,0,0,0.1); + border: 1px solid var(--border); +} + +.kpi-card:hover { + 
transform: translateY(-3px); + box-shadow: 0 4px 6px rgba(0,0,0,0.1); + cursor: pointer; +} + +/* ========================================== + ๐Ÿ”’ AUTH PAGES (Login & Register) + ========================================== */ +.auth-card { background: white; padding: 40px; border-radius: 8px; box-shadow: 0 4px 6px rgba(0,0,0,0.1); width: 100%; max-width: 400px; text-align: center; } +.auth-card.dark { background: #1e293b; box-shadow: 0 10px 25px rgba(0,0,0,0.5); max-width: 450px; border: 1px solid #334155; } +.auth-card input { width: 100%; padding: 10px; margin: 10px 0 20px 0; border: 1px solid #cbd5e1; border-radius: 4px; box-sizing: border-box; } +.auth-card.dark input { border: 1px solid #475569; background: #0f172a; color: white; } +.auth-card .btn { width: 100%; padding: 12px; font-weight: bold; cursor: pointer; font-size: 1rem; } +.error { color: #dc2626; margin-bottom: 15px; font-size: 0.9rem; display: none; } +.error.dark { color: #ef4444; } + +/* ========================================== + ๐Ÿ“Š ASSET ROLLUP VIEW + ========================================== */ +.count-badge { padding: 4px 8px; border-radius: 999px; font-weight: bold; font-size: 0.85rem; display: inline-block; min-width: 20px; text-align: center; } +.bg-critical { background: #fee2e2; color: #dc2626; } +.bg-high { background: #ffedd5; color: #ea580c; } +.bg-medium { background: #fef9c3; color: #ca8a04; } +.bg-low { background: #e0f2fe; color: #0284c7; } +.bg-zero { background: #f1f5f9; color: #94a3b8; font-weight: normal; } + +.navbar-strategic { background: #0f172a; color: white; padding: 15px 30px; display: flex; justify-content: space-between; align-items: center; } +.navbar-strategic a { color: #cbd5e1; text-decoration: none; margin-left: 20px; font-weight: 500; } +.navbar-strategic a:hover { color: white; } + +.notification-bubble { + background: #e11d48; + color: white; + border-radius: 10px; + padding: 2px 8px; + font-size: 0.75rem; + margin-left: 8px; + font-weight: bold; + 
box-shadow: 0 2px 4px rgba(225, 29, 72, 0.3); +} + + +.archived-row { + background-color: #fafafa !important; + opacity: 0.8; +} + +.archived-row td { + color: #64748b; +} + +.archived-row .badge { + filter: grayscale(0.5) opacity(0.7); +} + +.archived-row a { + color: #64748b !important; +} + +.archive-badge { + padding: 4px 10px; + border-radius: 6px; + font-size: 0.7rem; + font-weight: 800; + text-transform: uppercase; + display: inline-block; +} \ No newline at end of file diff --git a/ui/templates/adapter_builder.gohtml b/ui/templates/adapter_builder.gohtml new file mode 100644 index 0000000..9b61b5c --- /dev/null +++ b/ui/templates/adapter_builder.gohtml @@ -0,0 +1,101 @@ +{{define "content"}} + + +
+
+

๐Ÿ”ง Adapter Builder: {{.Filename}}

+
+ +
+
+
+

1. Data Preview

+ + +
+
+
+

Click "Load" to generate a data preview and extract column headers.

+
+
+
+
+ +
+

2. Schema Mapping

+ +
+
+
+ + +
+
+ + +
+
+ +
+ + +
+ +
+

Map to Core Fields

+ + + + + + + + + + + + + + + + + + + + + + + + +
Title *
Asset *
Severity *
Description
Remediation
+ +
+ +
+
+ + +
+
+
+
+
+ +{{end}} \ No newline at end of file diff --git a/ui/templates/admin.gohtml b/ui/templates/admin.gohtml new file mode 100644 index 0000000..d31a199 --- /dev/null +++ b/ui/templates/admin.gohtml @@ -0,0 +1,28 @@ +{{define "content"}} + +
+
+

The Sheriff's Office

+

Strategic Command, Personnel, & Operations

+
+
+ +
+ + + + + +
+ + {{template "admin_metrics" .}} + {{template "admin_performance" .}} + {{template "admin_config" .}} + {{template "admin_feed" .}} + {{template "admin_modals" .}} + + + +{{end}} \ No newline at end of file diff --git a/ui/templates/assets.gohtml b/ui/templates/assets.gohtml new file mode 100644 index 0000000..57ed679 --- /dev/null +++ b/ui/templates/assets.gohtml @@ -0,0 +1,56 @@ +{{define "content"}} + + + +
+
+
+

Asset Risk Rollup

+

Tracking {{.TotalCount}} vulnerable assets across the ranch.

+
+
+ +
+ + + + + + + + + + + + + {{range .Assets}} + + + + + + + + + {{else}} + + + + {{end}} + +
Asset IdentifierTotal ActiveCriticalHighMediumLow
+ + {{.AssetIdentifier}} + + {{.TotalActive}}{{.Critical}}{{.High}}{{.Medium}}{{.Low}}
+ No vulnerable assets found. The ranch is secure! +
+
+
+{{end}} \ No newline at end of file diff --git a/ui/templates/base.gohtml b/ui/templates/base.gohtml new file mode 100644 index 0000000..ef5b6b4 --- /dev/null +++ b/ui/templates/base.gohtml @@ -0,0 +1,36 @@ +{{define "base"}} + + + + + RiskRancher OSS + + + +
+ + +
+ + +
+ {{template "content" .}} +
+ + + +{{end}} \ No newline at end of file diff --git a/ui/templates/components/admin_config.gohtml b/ui/templates/components/admin_config.gohtml new file mode 100644 index 0000000..db27bbc --- /dev/null +++ b/ui/templates/components/admin_config.gohtml @@ -0,0 +1,112 @@ +{{define "admin_config"}} +
+
+
+
+

๐Ÿค  Personnel

+ +
+ + + + {{range .Users}} + + + + + + {{end}} + +
NameRoleActions
{{.FullName}}
{{.Email}}
{{.GlobalRole}} + + + +
+
+ +
+
+
+

๐Ÿ›ค๏ธ Source Routing

+ +
+
+

Automate ticket assignment and triage based on asset tags or CVEs.

+ Learn about RiskRancher Pro → +
+
+
+
+ +
+ +
+
+
+

โฑ๏ธ SLA Policies & System Time

+

Locked to Standard FedRAMP/NIST Default Timeframes

+
+ +
+
+
+

Base Configuration

+ + +
+
+
+
+
+
+
+

SLA Matrix (Days to Patch)

+
+ + + + + + + + +
SeverityTriagePatch
Critical13
High314
Medium730
Low1490
+
+
+
+ +
+

โš™๏ธ Operations

+ +
+ +
+ + +
+
+ +
+ + โฌ‡๏ธ Export JSON State +
+ +
+
+ Core Engine: {{.Version}} ({{.Commit}}) + +
+
+
+
+
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/admin_feed.gohtml b/ui/templates/components/admin_feed.gohtml new file mode 100644 index 0000000..1a6686c --- /dev/null +++ b/ui/templates/components/admin_feed.gohtml @@ -0,0 +1,33 @@ +{{define "admin_feed"}} +
+
+
+

๐Ÿ“ป System Logs

+

Real-time tamper-evident system audit log.

+
+
+ +
+
+ +
+
Loading logs...
+
+ +
+ Showing 0 of 0 +
+ + +
+
+
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/admin_metrics.gohtml b/ui/templates/components/admin_metrics.gohtml new file mode 100644 index 0000000..c7514ca --- /dev/null +++ b/ui/templates/components/admin_metrics.gohtml @@ -0,0 +1,80 @@ +{{define "admin_metrics"}} +
+
+ +

Active CISA KEVs

+

{{.Analytics.ActiveKEVs}}

+
+ +

Open Criticals

+

{{.Analytics.OpenCriticals}}

+
+ +

SLA Breaches

+

{{.Analytics.TotalOverdue}}

+
+
+

Global MTTR (Days)

+

{{.Analytics.GlobalMTTRDays}}

+
+
+ +
+
+

๐Ÿ”ด Open Risk Profile ({{.Analytics.Severity.Total}})

+
+ {{if gt .Analytics.Severity.Total 0}} +
+
+
+
+
+ {{else}} +
No Open Findings
+ {{end}} +
+
+
Crit
{{.Analytics.Severity.Critical}}
+
High
{{.Analytics.Severity.High}}
+
Med
{{.Analytics.Severity.Medium}}
+
Low
{{.Analytics.Severity.Low}}
+
+
+ +
+

๐ŸŸข Resolution Profile (Closed) ({{.Analytics.Resolution.Total}})

+
+ {{if gt .Analytics.Resolution.Total 0}} +
+
+
+ {{else}} +
No Closed Findings
+ {{end}} +
+
+
Patched
{{.Analytics.Resolution.Patched}}
+
Accepted
{{.Analytics.Resolution.RiskAccepted}}
+
False Pos
{{.Analytics.Resolution.FalsePositive}}
+
+
+ +
+

๐ŸŽฏ Top Vulnerable Assets

+ {{range .Analytics.TopAssets}} + +
+ {{.Asset}} + {{.Count}} Findings +
+
+
+
+
+ {{else}} +

No vulnerable assets found.

+ {{end}} +
+
+
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/admin_modals.gohtml b/ui/templates/components/admin_modals.gohtml new file mode 100644 index 0000000..ca45b67 --- /dev/null +++ b/ui/templates/components/admin_modals.gohtml @@ -0,0 +1,25 @@ +{{define "admin_modals"}} + + +{{end}} \ No newline at end of file diff --git a/ui/templates/components/admin_performance.gohtml b/ui/templates/components/admin_performance.gohtml new file mode 100644 index 0000000..0977822 --- /dev/null +++ b/ui/templates/components/admin_performance.gohtml @@ -0,0 +1,114 @@ +{{define "admin_performance"}} +
+
+
+

๐Ÿ“ก Integration Performance

+

Diagnostic breakdown of global KPIs, bottlenecks, and SLA tracking by scanner source.

+
+
+ + + + + + + + + + + + + + {{range .Analytics.SourceHealth}} + + + + + + + + + + + + + +{{else}} + +{{end}} + +
Integration๐Ÿ”ฅ High Risk Drivers
(Ties to KPI Metrics)
๐Ÿ•’ Analyst Backlog
(Triage Phase)
โณ IT Bottlenecks
(Patch Phase)
๐Ÿ›ก๏ธ Resolution Hygiene
(Closed Profile)
Strategic Insight
+ {{.Source}}
+ {{.TotalOpen}} Total Open +
+
+ {{if gt .Criticals 0}}{{.Criticals}} CRIT{{end}} + {{if gt .CisaKEVs 0}}{{.CisaKEVs}} KEV{{end}} + {{if and (eq .Criticals 0) (eq .CisaKEVs 0)}}-{{end}} +
+
+ {{if gt .Untriaged 0}} + {{.Untriaged}} Pending + {{else}} + โœ“ Clear + {{end}} + + {{if gt .PatchOverdue 0}} + {{.PatchOverdue}} Overdue + {{else if gt .PendingRisk 0}} + {{.PendingRisk}} Excepted + {{else}} + โœ“ Met + {{end}} + +
{{.TotalClosed}} Closed
+
+ {{if gt .Patched 0}}โœ“ {{.Patched}}{{end}} + {{if gt .RiskAccepted 0}}โš–๏ธ {{.RiskAccepted}}{{end}} + {{if gt .FalsePositive 0}}๐Ÿšซ {{.FalsePositive}}{{end}} + {{if eq .TotalClosed 0}}-{{end}} +
+
+
{{.StrategicNote}}
+
Lead: {{.TopAssignee}}
+
No active sources found.
+ +
+
+

๐Ÿ”Œ Recent Sync History

+

Operational ledger of all API pushes, webhooks, and CSV uploads.

+
+
+ + + + + + + + + + + + + {{range .SyncLogs}} + + + + + + + + {{else}} + + {{end}} + +
TimestampSourceStatusRecords ProcessedDiagnostics
{{.CreatedAt}}{{.Source}} + {{if eq .Status "Success"}} + โœ“ Success + {{else}} + โœ— Failed + {{end}} + {{.RecordsProcessed}}{{.ErrorMessage}}
No syncs recorded yet.
+
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/dash_modals.gohtml b/ui/templates/components/dash_modals.gohtml new file mode 100644 index 0000000..a36f254 --- /dev/null +++ b/ui/templates/components/dash_modals.gohtml @@ -0,0 +1,126 @@ +{{define "dash_modals"}} + + + + +
+
+
+
+ +

+
+
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+
+ + +
+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ +
+ + +
+
+
+ + +
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/dash_table.gohtml b/ui/templates/components/dash_table.gohtml new file mode 100644 index 0000000..ba14571 --- /dev/null +++ b/ui/templates/components/dash_table.gohtml @@ -0,0 +1,108 @@ +{{define "dash_table"}} +
+
+
+

Asset Triage Queue

+
+ + {{if eq .CurrentTab "archives"}} + + {{end}} + +
+ + ๐Ÿ“ฅ Import Data + +
+
+ + {{if .CurrentAsset}} +
+
Asset Filter Active: Showing tickets for {{.CurrentAsset}}
+ Clear Filter +
+ {{end}} + + + + + + + + + + + {{if ne .CurrentTab "holding_pen"}}{{end}} + + + + {{range .Tickets}} + + + + + + + + {{if eq $.CurrentTab "holding_pen"}} + {{else if eq $.CurrentTab "chute"}} + + {{else if eq $.CurrentTab "archives"}} + + {{end}} + + {{end}} + +
{{.Severity}}{{.Source}} + + + + + + + + + + {{.Title}} + + {{if eq .Status "Returned to Security"}} +
+ ๐Ÿ”„ Returned by IT: {{.LatestComment}} +
+ {{end}} +
+ {{if eq .Assignee "Unassigned"}} + Unassigned + {{else}} + {{.Assignee}} + {{end}} + + {{if eq $.CurrentTab "holding_pen"}} + + {{else}} + {{if .IsOverdue}}{{.SLAString}}{{else}}{{.SLAString}}{{end}} + {{end}} + + {{if eq .Status "Patched"}} + โœ… RESOLVED + {{else if eq .Status "False Positive"}} + ๐Ÿ‘ป IGNORED + {{end}} +
+ Archived {{.UpdatedAt.Format "Jan 02"}} (Took {{.SLAString}}) +
+
+ + +
+{{end}} \ No newline at end of file diff --git a/ui/templates/components/dash_tabs.gohtml b/ui/templates/components/dash_tabs.gohtml new file mode 100644 index 0000000..e3ed866 --- /dev/null +++ b/ui/templates/components/dash_tabs.gohtml @@ -0,0 +1,19 @@ +{{define "dash_tabs"}} +
+ + ๐Ÿ“ฉ Holding Pen + {{if gt .ReturnedCount 0}} + {{.ReturnedCount}} + {{end}} + + ๐Ÿค  The Chute (Assigned) + + + ๐Ÿ”’ Pending Verification (Pro) + + + + ๐Ÿ—„๏ธ The Archives + +
+{{end}} \ No newline at end of file diff --git a/ui/templates/dashboard.gohtml b/ui/templates/dashboard.gohtml new file mode 100644 index 0000000..35a924c --- /dev/null +++ b/ui/templates/dashboard.gohtml @@ -0,0 +1,17 @@ +{{define "content"}} + + + + {{template "dash_tabs" .}} + +
+ {{template "dash_table" .}} +
+ + {{template "dash_modals" .}} + + + +{{end}} \ No newline at end of file diff --git a/ui/templates/ingest.gohtml b/ui/templates/ingest.gohtml new file mode 100644 index 0000000..bbb9f8e --- /dev/null +++ b/ui/templates/ingest.gohtml @@ -0,0 +1,41 @@ +{{define "content"}} +
+

๐Ÿ“ฅ Data Ingestion & Parsers

+

Bring your findings into the ranch.

+ +
+
+
๐Ÿ“
+

Pentest Report Parser

+

Upload a Word (DOCX) or PDF penetration test report. We'll extract the findings and map them to tickets.

+ Upload Report +
+ +
+
๐Ÿ› ๏ธ
+

Custom Adapter Builder

+

Using a proprietary scanner? Build a visual JSON mapping to seamlessly ingest its outputs.

+ Build New Adapter +
+
+ +
+

๐Ÿ“ก Standard Scanner Ingestion

+
+ + + + + + + +
+ +
+
+ +{{end}} \ No newline at end of file diff --git a/ui/templates/login.gohtml b/ui/templates/login.gohtml new file mode 100644 index 0000000..44e4374 --- /dev/null +++ b/ui/templates/login.gohtml @@ -0,0 +1,37 @@ +{{define "login"}} + + + + + RiskRancher - Login + + + + +
+

๐Ÿด RiskRancher

+

Sign in to your SOC Dashboard

+ +
+ +
+
+ + + + + +
+ +
+ +
+ First time setup? Initialize System +
+
+ + + + + +{{end}} \ No newline at end of file diff --git a/ui/templates/register.gohtml b/ui/templates/register.gohtml new file mode 100644 index 0000000..822d1e7 --- /dev/null +++ b/ui/templates/register.gohtml @@ -0,0 +1,42 @@ +{{define "register"}} + + + + + RiskRancher - System Initialization + + + + +
+

๐Ÿ›ก๏ธ Initialize System

+

+ Welcome to RiskRancher. The first user to register will automatically be granted the Sheriff (Global Admin) role. +

+ +
+ +
+
+ + + + + + + + +
+ +
+ +
+ System already initialized? Login here +
+
+ + + + + +{{end}} \ No newline at end of file diff --git a/ui/templates/report_parser.gohtml b/ui/templates/report_parser.gohtml new file mode 100644 index 0000000..be14c81 --- /dev/null +++ b/ui/templates/report_parser.gohtml @@ -0,0 +1,37 @@ +{{define "content"}} +
+
+
+

๐Ÿ“ Manual Pentest Parser

+

Highlight text in the DOCX viewer to extract vulnerabilities.

+
+
+ +
+
+
+

๐Ÿ“„ Document Viewer

+
+
+ {{.RenderedHTML}} +
+
+ +
+
+

๐Ÿ“‹ Draft Findings

+ +
+
+
+ No drafts yet.

Highlight text on the left to begin clipping. +
+
+
+
+
+ + + + +{{end}} \ No newline at end of file diff --git a/ui/templates/report_upload.gohtml b/ui/templates/report_upload.gohtml new file mode 100644 index 0000000..16db470 --- /dev/null +++ b/ui/templates/report_upload.gohtml @@ -0,0 +1,56 @@ +{{define "content"}} +
+
+

๐Ÿ“„ Upload Pentest Report

+

Upload a .docx manual assessment to enter the clipping parser.

+ +
+

Drag & Drop DOCX Here

+

or click to browse your computer

+ +
+ + +
+
+ + +{{end}} \ No newline at end of file diff --git a/ui/ui.go b/ui/ui.go new file mode 100644 index 0000000..d1d4c11 --- /dev/null +++ b/ui/ui.go @@ -0,0 +1,345 @@ +package ui + +import ( + "bytes" + "embed" + "html/template" + "io/fs" + "log" + "math" + "net/http" + "net/http/httptest" + "path/filepath" + "strconv" + "strings" + + "epigas.gitea.cloud/RiskRancher/core/pkg/auth" + "epigas.gitea.cloud/RiskRancher/core/pkg/domain" + "epigas.gitea.cloud/RiskRancher/core/pkg/report" +) + +//go:embed templates/* templates/components/* static/* +var CoreUIFS embed.FS + +var ( + AppVersion = "dev" + AppCommit = "none" +) + +var CoreTemplates *template.Template +var Pages map[string]*template.Template + +// SetVersionInfo is called by main.go on startup to inject ldflags +func SetVersionInfo(version, commit string) { + AppVersion = version + AppCommit = commit +} + +func init() { + funcMap := template.FuncMap{"lower": strings.ToLower} + Pages = make(map[string]*template.Template) + + var err error + + CoreTemplates, err = template.New("").Funcs(funcMap).ParseFS(CoreUIFS, "templates/*.gohtml", "templates/components/*.gohtml") + if err != nil && !strings.Contains(err.Error(), "pattern matches no files") { + log.Printf("Warning: Failed to parse master core templates: %v", err) + } + + dashTmpl := template.New("").Funcs(funcMap) + dashTmpl, err = dashTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/dashboard.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse dashboard shell. Err: %v", err) + } + Pages["dashboard"] = dashTmpl + + adminTmpl := template.New("").Funcs(funcMap) + adminTmpl, err = adminTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/admin.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse admin shell. 
Err: %v", err) + } + Pages["admin"] = adminTmpl + + Pages["login"], err = template.New("").Funcs(funcMap).ParseFS(CoreUIFS, "templates/login.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse login. Err: %v", err) + } + + Pages["register"], err = template.New("").Funcs(funcMap).ParseFS(CoreUIFS, "templates/register.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse register. Err: %v", err) + } + + Pages["assets"], err = template.New("").Funcs(funcMap).ParseFS(CoreUIFS, "templates/base.gohtml", "templates/assets.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse assets. Err: %v", err) + } + + ingestTmpl := template.New("").Funcs(funcMap) + ingestTmpl, err = ingestTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/ingest.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse ingest shell. Err: %v", err) + } + Pages["ingest"] = ingestTmpl + + adapterTmpl := template.New("").Funcs(funcMap) + adapterTmpl, err = adapterTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/adapter_builder.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse adapter builder shell. Err: %v", err) + } + Pages["adapter_builder"] = adapterTmpl + + uploadTmpl := template.New("").Funcs(funcMap) + uploadTmpl, err = uploadTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/report_upload.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse report upload template. Err: %v", err) + } + Pages["report_upload"] = uploadTmpl + + parserTmpl := template.New("").Funcs(funcMap) + parserTmpl, err = parserTmpl.ParseFS(CoreUIFS, "templates/base.gohtml", "templates/report_parser.gohtml", "templates/components/*.gohtml") + if err != nil { + log.Fatalf("FATAL: Failed to parse report parser template. 
Err: %v", err) + } + Pages["report_parser"] = parserTmpl +} + +func StaticHandler() http.Handler { + staticFS, err := fs.Sub(CoreUIFS, "static") + if err != nil { + log.Fatal("Failed to load embedded static files:", err) + } + return http.StripPrefix("/static/", http.FileServer(http.FS(staticFS))) +} + +type PageData struct { + Tickets any + CurrentTab string + CurrentFilter string + CurrentAsset string + ReturnedCount int + CountCritical int + CountOverdue int + CountMine int + CurrentPage int + TotalPages int + NextPage int + PrevPage int + CountVerification int + HasNext bool + HasPrev bool + Version string + Commit string +} + +func HandleDashboard(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + userIDVal := r.Context().Value(auth.UserIDKey) + if userIDVal == nil { + http.Redirect(w, r, "/login", http.StatusSeeOther) + return + } + + userID := userIDVal.(int) + user, err := store.GetUserByID(r.Context(), userID) + if err != nil { + http.Redirect(w, r, "/login", http.StatusSeeOther) + return + } + + if user.GlobalRole == "Sheriff" { + http.Redirect(w, r, "/admin", http.StatusSeeOther) + return + } + + currentUserEmail := user.Email + currentUserRole := user.GlobalRole + + tab := r.URL.Query().Get("tab") + if tab == "" { + tab = "holding_pen" + } + + statusFilter := tab + if tab == "holding_pen" { + statusFilter = "Waiting to be Triaged" + } else if tab == "chute" { + statusFilter = "Assigned Out" + } else if tab == "verification" { + statusFilter = "Pending Verification" + } + + filter := r.URL.Query().Get("filter") + assetFilter := r.URL.Query().Get("asset") + + pageStr := r.URL.Query().Get("page") + page, _ := strconv.Atoi(pageStr) + if page < 1 { + page = 1 + } + limit := 50 + offset := (page - 1) * limit + + tickets, totalRecords, metrics, err := store.GetDashboardTickets( + r.Context(), statusFilter, filter, assetFilter, currentUserEmail, currentUserRole, limit, offset, + ) + + if err != nil { + http.Error(w, 
"Database query error: "+err.Error(), http.StatusInternalServerError) + return + } + + totalPages := int(math.Ceil(float64(totalRecords) / float64(limit))) + if totalPages == 0 { + totalPages = 1 + } + + data := PageData{ + Tickets: tickets, + CurrentTab: tab, + CurrentFilter: filter, + CurrentAsset: assetFilter, + ReturnedCount: metrics["returned"], + CountCritical: metrics["critical"], + CountOverdue: metrics["overdue"], + CountMine: metrics["mine"], + CountVerification: metrics["verification"], + CurrentPage: page, + TotalPages: totalPages, + NextPage: page + 1, + PrevPage: page - 1, + HasNext: page < totalPages, + HasPrev: page > 1, + Version: AppVersion, + Commit: AppCommit, + } + + var buf bytes.Buffer + if err := Pages["dashboard"].ExecuteTemplate(&buf, "base", data); err != nil { + http.Error(w, "Template rendering error: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + buf.WriteTo(w) + } +} + +func HandleLoginUI() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + if err := Pages["login"].ExecuteTemplate(w, "login", nil); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} + +func HandleRegisterUI() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + if err := Pages["register"].ExecuteTemplate(w, "register", nil); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} + +func HandleAdminDashboard(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + users, _ := store.GetAllUsers(r.Context()) + config, _ := store.GetAppConfig(r.Context()) + slas, _ := store.GetSLAPolicies(r.Context()) + adapters, _ := store.GetAdapters(r.Context()) + + analytics, _ := 
store.GetSheriffAnalytics(r.Context()) + activityFeed, _ := store.GetGlobalActivityFeed(r.Context(), 15) + syncLogs, _ := store.GetRecentSyncLogs(r.Context(), 10) + + data := map[string]any{ + "Users": users, + "Config": config, + "SLAs": slas, + "Adapters": adapters, + "Analytics": analytics, + "Feed": activityFeed, + "SyncLogs": syncLogs, + "Version": AppVersion, + "Commit": AppCommit, + } + + var buf bytes.Buffer + if err := Pages["admin"].ExecuteTemplate(&buf, "base", data); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + buf.WriteTo(w) + } +} + +func HandleIngestUI(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + + adapters, _ := store.GetAdapters(r.Context()) + data := map[string]any{ + "Adapters": adapters, + "Version": AppVersion, + "Commit": AppCommit, + } + + if err := Pages["ingest"].ExecuteTemplate(w, "base", data); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} + +func HandleAdapterBuilderUI(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + data := map[string]any{ + "Filename": r.URL.Query().Get("filename"), + "Version": AppVersion, + "Commit": AppCommit, + } + if err := Pages["adapter_builder"].ExecuteTemplate(w, "base", data); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} + +func HandleParserUI(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + reportID := r.PathValue("id") + + filePath := filepath.Join(report.UploadDir, reportID) + recorder := httptest.NewRecorder() + 
report.ServeDOCXAsHTML(recorder, filePath) + safeHTML := template.HTML(recorder.Body.String()) + + data := map[string]any{ + "ReportID": reportID, + "RenderedHTML": safeHTML, + "Version": AppVersion, + "Commit": AppCommit, + } + + if err := Pages["report_parser"].ExecuteTemplate(w, "base", data); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} + +func HandlePentestUploadUI(store domain.Store) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + data := map[string]any{ + "Version": AppVersion, + "Commit": AppCommit, + } + if err := Pages["report_upload"].ExecuteTemplate(w, "base", data); err != nil { + http.Error(w, "Template render error: "+err.Error(), http.StatusInternalServerError) + } + } +} diff --git a/uploads/pentest-report-bridgefy.docx b/uploads/pentest-report-bridgefy.docx new file mode 100644 index 0000000..eff63e4 Binary files /dev/null and b/uploads/pentest-report-bridgefy.docx differ