First release of open core
This commit is contained in:
113
pkg/report/docx_html.go
Normal file
113
pkg/report/docx_html.go
Normal file
@@ -0,0 +1,113 @@
|
||||
package report
|
||||
|
||||
import (
	"archive/zip"
	"bytes"
	"encoding/base64"
	"encoding/xml"
	"fmt"
	"html"
	"io"
	"net/http"
	"path/filepath"
	"strings"
)
|
||||
|
||||
// Relationships maps the pkg rId to the actual media file
|
||||
type Relationships struct {
|
||||
XMLName xml.Name `xml:"Relationships"`
|
||||
Rel []struct {
|
||||
Id string `xml:"Id,attr"`
|
||||
Target string `xml:"Target,attr"`
|
||||
} `xml:"Relationship"`
|
||||
}
|
||||
|
||||
func ServeDOCXAsHTML(w http.ResponseWriter, docxPath string) {
|
||||
r, err := zip.OpenReader(docxPath)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to open DOCX archive", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
relsMap := make(map[string]string)
|
||||
for _, f := range r.File {
|
||||
if f.Name == "word/_rels/document.xml.rels" {
|
||||
rc, _ := f.Open()
|
||||
var rels Relationships
|
||||
xml.NewDecoder(rc).Decode(&rels)
|
||||
rc.Close()
|
||||
for _, rel := range rels.Rel {
|
||||
relsMap[rel.Id] = rel.Target
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
mediaMap := make(map[string]string)
|
||||
for _, f := range r.File {
|
||||
if strings.HasPrefix(f.Name, "word/media/") {
|
||||
rc, _ := f.Open()
|
||||
data, _ := io.ReadAll(rc)
|
||||
rc.Close()
|
||||
|
||||
ext := strings.TrimPrefix(filepath.Ext(f.Name), ".")
|
||||
if ext == "jpeg" || ext == "jpg" {
|
||||
ext = "jpeg"
|
||||
}
|
||||
b64 := base64.StdEncoding.EncodeToString(data)
|
||||
mediaMap[f.Name] = fmt.Sprintf("data:image/%s;base64,%s", ext, b64)
|
||||
}
|
||||
}
|
||||
|
||||
var htmlOutput bytes.Buffer
|
||||
var inParagraph bool
|
||||
|
||||
for _, f := range r.File {
|
||||
if f.Name == "word/document.xml" {
|
||||
rc, _ := f.Open()
|
||||
decoder := xml.NewDecoder(rc)
|
||||
|
||||
for {
|
||||
token, err := decoder.Token()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
switch se := token.(type) {
|
||||
case xml.StartElement:
|
||||
if se.Name.Local == "p" {
|
||||
htmlOutput.WriteString("<p style='margin-bottom: 10px;'>")
|
||||
inParagraph = true
|
||||
}
|
||||
if se.Name.Local == "t" {
|
||||
var text string
|
||||
decoder.DecodeElement(&text, &se)
|
||||
htmlOutput.WriteString(text)
|
||||
}
|
||||
if se.Name.Local == "blip" {
|
||||
for _, attr := range se.Attr {
|
||||
if attr.Name.Local == "embed" {
|
||||
targetPath := relsMap[attr.Value]
|
||||
fullMediaPath := "word/" + targetPath
|
||||
|
||||
if b64URI, exists := mediaMap[fullMediaPath]; exists {
|
||||
imgTag := fmt.Sprintf(`<br><img src="%s" style="max-width: 100%%; height: auto; border: 1px solid #cbd5e1; border-radius: 4px; margin: 15px 0; cursor: pointer;" class="pentest-img" title="Click to extract image"><br>`, b64URI)
|
||||
htmlOutput.WriteString(imgTag)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
case xml.EndElement:
|
||||
if se.Name.Local == "p" && inParagraph {
|
||||
htmlOutput.WriteString("</p>\n")
|
||||
inParagraph = false
|
||||
}
|
||||
}
|
||||
}
|
||||
rc.Close()
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
w.Write(htmlOutput.Bytes())
|
||||
}
|
||||
107
pkg/report/drafts.go
Normal file
107
pkg/report/drafts.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"epigas.gitea.cloud/RiskRancher/core/pkg/auth"
|
||||
domain2 "epigas.gitea.cloud/RiskRancher/core/pkg/domain"
|
||||
)
|
||||
|
||||
func (h *Handler) HandleSaveDraft(w http.ResponseWriter, r *http.Request) {
|
||||
reportID := r.PathValue("id")
|
||||
|
||||
var draft domain2.DraftTicket
|
||||
if err := json.NewDecoder(r.Body).Decode(&draft); err != nil {
|
||||
http.Error(w, "Invalid JSON", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
draft.ReportID = reportID
|
||||
|
||||
if err := h.Store.SaveDraft(r.Context(), draft); err != nil {
|
||||
http.Error(w, "DB Error: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
}
|
||||
|
||||
func (h *Handler) HandleGetDrafts(w http.ResponseWriter, r *http.Request) {
|
||||
reportID := r.PathValue("id")
|
||||
|
||||
drafts, err := h.Store.GetDraftsByReport(r.Context(), reportID)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to get drafts", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(drafts)
|
||||
}
|
||||
|
||||
func (h *Handler) HandleDeleteDraft(w http.ResponseWriter, r *http.Request) {
|
||||
draftID := r.PathValue("draft_id")
|
||||
|
||||
if err := h.Store.DeleteDraft(r.Context(), draftID); err != nil {
|
||||
http.Error(w, "Failed to delete draft", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
|
||||
func (h *Handler) HandlePromoteDrafts(w http.ResponseWriter, r *http.Request) {
|
||||
reportIDStr := r.PathValue("id")
|
||||
if reportIDStr == "" {
|
||||
http.Error(w, "Invalid Report ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
userIDVal := r.Context().Value(auth.UserIDKey)
|
||||
if userIDVal == nil {
|
||||
http.Error(w, "Unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
user, err := h.Store.GetUserByID(r.Context(), userIDVal.(int))
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to identify user", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
analystEmail := user.Email
|
||||
|
||||
var payload []domain2.Ticket
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "Invalid JSON payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := h.Store.PromotePentestDrafts(r.Context(), reportIDStr, analystEmail, payload); err != nil {
|
||||
http.Error(w, "Database error during promotion: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
}
|
||||
|
||||
func (h *Handler) HandleUpdateDraft(w http.ResponseWriter, r *http.Request) {
|
||||
idStr := r.PathValue("id")
|
||||
draftID, err := strconv.Atoi(idStr)
|
||||
if err != nil {
|
||||
http.Error(w, "Invalid draft ID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload domain2.Ticket
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "Invalid JSON", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.Store.UpdateDraft(r.Context(), draftID, payload); err != nil {
|
||||
http.Error(w, "Failed to auto-save draft", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
13
pkg/report/handler.go
Normal file
13
pkg/report/handler.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"epigas.gitea.cloud/RiskRancher/core/pkg/domain"
|
||||
)
|
||||
|
||||
// Handler bundles the HTTP handlers for report upload/viewing and
// draft-ticket management around a shared persistence layer.
type Handler struct {
	// Store is the persistence interface every handler method goes through.
	Store domain.Store
}
|
||||
|
||||
func NewHandler(store domain.Store) *Handler {
|
||||
return &Handler{Store: store}
|
||||
}
|
||||
57
pkg/report/parser.go
Normal file
57
pkg/report/parser.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ExtractJSONField traverses an unmarshaled JSON object using dot notation.
// Map keys are matched by name and slices by decimal index, e.g.
// "host.details.0.ip". The leaf value is rendered as a string; any miss
// (unknown key, out-of-range index, nil node, non-container) yields "".
func ExtractJSONField(data any, path string) string {
	if path == "" || data == nil {
		return ""
	}

	node := data
	for _, segment := range strings.Split(path, ".") {
		switch container := node.(type) {
		case map[string]any:
			next, found := container[segment]
			if !found {
				return ""
			}
			node = next
		case []any:
			pos, convErr := strconv.Atoi(segment)
			if convErr != nil || pos < 0 || pos >= len(container) {
				return ""
			}
			node = container[pos]
		default:
			// nil or a scalar with path segments left over.
			return ""
		}
	}

	if node == nil {
		return ""
	}

	// Render the leaf: JSON numbers arrive as float64, so format them
	// with the minimal representation.
	switch value := node.(type) {
	case string:
		return value
	case float64:
		return strconv.FormatFloat(value, 'f', -1, 64)
	case bool:
		return strconv.FormatBool(value)
	default:
		return fmt.Sprintf("%v", value)
	}
}
|
||||
68
pkg/report/parser_test.go
Normal file
68
pkg/report/parser_test.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestExtractJSONField(t *testing.T) {
|
||||
semgrepRaw := []byte(`{
|
||||
"check_id": "crypto-bad-mac",
|
||||
"extra": {
|
||||
"severity": "WARNING",
|
||||
"message": "Use of weak MAC"
|
||||
}
|
||||
}`)
|
||||
var semgrep map[string]any
|
||||
json.Unmarshal(semgrepRaw, &semgrep)
|
||||
|
||||
trivyRaw := []byte(`{
|
||||
"VulnerabilityID": "CVE-2021-44228",
|
||||
"PkgName": "log4j-core",
|
||||
"Severity": "CRITICAL"
|
||||
}`)
|
||||
var trivy map[string]any
|
||||
json.Unmarshal(trivyRaw, &trivy)
|
||||
|
||||
openvasRaw := []byte(`{
|
||||
"name": "Cleartext Transmission",
|
||||
"host": {
|
||||
"details": [
|
||||
{"ip": "192.168.1.50"},
|
||||
{"ip": "10.0.0.5"}
|
||||
]
|
||||
},
|
||||
"threat": "High"
|
||||
}`)
|
||||
var openvas map[string]any
|
||||
json.Unmarshal(openvasRaw, &openvas)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
finding any
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{"Semgrep Flat", semgrep, "check_id", "crypto-bad-mac"},
|
||||
{"Semgrep Nested", semgrep, "extra.severity", "WARNING"},
|
||||
{"Semgrep Deep Nested", semgrep, "extra.message", "Use of weak MAC"},
|
||||
|
||||
{"Trivy Flat 1", trivy, "VulnerabilityID", "CVE-2021-44228"},
|
||||
{"Trivy Flat 2", trivy, "Severity", "CRITICAL"},
|
||||
|
||||
{"OpenVAS Flat", openvas, "threat", "High"},
|
||||
{"OpenVAS Array Index", openvas, "host.details.0.ip", "192.168.1.50"},
|
||||
|
||||
{"Missing Field", trivy, "does.not.exist", ""},
|
||||
{"Empty Path", trivy, "", ""},
|
||||
}
|
||||
|
||||
for _, tc := range tests {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
result := ExtractJSONField(tc.finding, tc.path)
|
||||
if result != tc.expected {
|
||||
t.Errorf("Path '%s': expected '%s', got '%s'", tc.path, tc.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
131
pkg/report/reports.go
Normal file
131
pkg/report/reports.go
Normal file
@@ -0,0 +1,131 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// UploadDir is the directory where uploaded report files are written and
// read back by the view handler.
var UploadDir = "./testdata"
|
||||
|
||||
// HandleUploadReport safely receives and stores the pentest file
|
||||
func (h *Handler) HandleUploadReport(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseMultipartForm(50 << 20); err != nil {
|
||||
http.Error(w, "Failed to parse form or file too large", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
file, header, err := r.FormFile("file")
|
||||
if err != nil {
|
||||
http.Error(w, "Missing 'file' field in upload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
cleanName := filepath.Base(header.Filename)
|
||||
if cleanName == "." || cleanName == "/" {
|
||||
cleanName = "uploaded_report.bin"
|
||||
}
|
||||
|
||||
os.MkdirAll(UploadDir, 0755)
|
||||
|
||||
destPath := filepath.Join(UploadDir, cleanName)
|
||||
destFile, err := os.Create(destPath)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to save file to disk", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer destFile.Close()
|
||||
|
||||
if _, err := io.Copy(destFile, file); err != nil {
|
||||
http.Error(w, "Error writing file", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
fmt.Fprintf(w, `{"file_id": "%s"}`, cleanName)
|
||||
}
|
||||
|
||||
// HandleViewReport streams the file to the iframe, converting DOCX if needed
|
||||
func (h *Handler) HandleViewReport(w http.ResponseWriter, r *http.Request) {
|
||||
fileID := r.PathValue("id")
|
||||
cleanName := filepath.Base(fileID)
|
||||
filePath := filepath.Join(UploadDir, cleanName)
|
||||
|
||||
if _, err := os.Stat(filePath); os.IsNotExist(err) {
|
||||
http.Error(w, "Report not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
ext := strings.ToLower(filepath.Ext(cleanName))
|
||||
|
||||
if ext == ".pdf" {
|
||||
w.Header().Set("Content-Type", "application/pdf")
|
||||
w.Header().Set("Content-Disposition", "inline; filename="+cleanName)
|
||||
http.ServeFile(w, r, filePath)
|
||||
return
|
||||
}
|
||||
|
||||
if ext == ".docx" {
|
||||
ServeDOCXAsHTML(w, filePath)
|
||||
return
|
||||
}
|
||||
|
||||
http.Error(w, "Unsupported file type. Please upload PDF or DOCX.", http.StatusBadRequest)
|
||||
}
|
||||
|
||||
func (h *Handler) HandleImageUpload(w http.ResponseWriter, r *http.Request) {
|
||||
var payload struct {
|
||||
Base64Data string `json:"image_data"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "Invalid JSON payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
parts := strings.SplitN(payload.Base64Data, ",", 2)
|
||||
if len(parts) != 2 {
|
||||
http.Error(w, "Invalid Base64 image format", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
ext := ".png"
|
||||
if strings.Contains(parts[0], "jpeg") || strings.Contains(parts[0], "jpg") {
|
||||
ext = ".jpg"
|
||||
}
|
||||
|
||||
rawBase64 := parts[1]
|
||||
imgBytes, err := base64.StdEncoding.DecodeString(rawBase64)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode Base64 data", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
randBytes := make([]byte, 8)
|
||||
rand.Read(randBytes)
|
||||
fileName := fmt.Sprintf("img_%x%s", randBytes, ext)
|
||||
|
||||
uploadDir := filepath.Join("data", "testdata", "images")
|
||||
if err := os.MkdirAll(uploadDir, 0755); err != nil {
|
||||
http.Error(w, "Failed to create directory structure", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
savePath := filepath.Join(uploadDir, fileName)
|
||||
if err := os.WriteFile(savePath, imgBytes, 0644); err != nil {
|
||||
http.Error(w, "Failed to save image to disk", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
publicURL := "/testdata/images/" + fileName
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]string{"url": publicURL})
|
||||
}
|
||||
126
pkg/report/reports_test.go
Normal file
126
pkg/report/reports_test.go
Normal file
@@ -0,0 +1,126 @@
|
||||
package report
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"epigas.gitea.cloud/RiskRancher/core/pkg/datastore"
|
||||
"epigas.gitea.cloud/RiskRancher/core/pkg/domain"
|
||||
)
|
||||
|
||||
func setupTestReport(t *testing.T) (*Handler, *sql.DB) {
|
||||
db := datastore.InitDB(":memory:")
|
||||
store := datastore.NewSQLiteStore(db)
|
||||
return NewHandler(store), db
|
||||
}
|
||||
|
||||
func GetVIPCookie(store domain.Store) *http.Cookie {
|
||||
user, err := store.GetUserByEmail(context.Background(), "vip@RiskRancher.com")
|
||||
if err != nil {
|
||||
user, _ = store.CreateUser(context.Background(), "vip@RiskRancher.com", "Test VIP", "hash", "Sheriff")
|
||||
}
|
||||
|
||||
store.CreateSession(context.Background(), "vip_token_999", user.ID, time.Now().Add(1*time.Hour))
|
||||
return &http.Cookie{Name: "session_token", Value: "vip_token_999"}
|
||||
}
|
||||
|
||||
// TestUploadAndViewReports drives the upload handler and the view handler
// end-to-end for both supported formats: a PDF served verbatim and a DOCX
// converted to HTML.
func TestUploadAndViewReports(t *testing.T) {
	h, db := setupTestReport(t)
	defer db.Close()

	t.Run("1. Test PDF Upload and View", func(t *testing.T) {
		// Build a multipart body carrying a fake PDF payload.
		body := new(bytes.Buffer)
		writer := multipart.NewWriter(body)
		part, _ := writer.CreateFormFile("file", "test_report.pdf")
		part.Write([]byte("%PDF-1.4 Fake PDF Content"))
		writer.Close()

		// Upload it as an authenticated user.
		reqUp := httptest.NewRequest(http.MethodPost, "/api/reports/upload", body)
		reqUp.AddCookie(GetVIPCookie(h.Store))
		reqUp.Header.Set("Content-Type", writer.FormDataContentType())
		rrUp := httptest.NewRecorder()
		h.HandleUploadReport(rrUp, reqUp)

		// View the just-uploaded file; the handler resolves it via the
		// {id} path value.
		reqView := httptest.NewRequest(http.MethodGet, "/api/reports/view/test_report.pdf", nil)
		reqView.AddCookie(GetVIPCookie(h.Store))
		reqView.SetPathValue("id", "test_report.pdf")
		rrView := httptest.NewRecorder()
		h.HandleViewReport(rrView, reqView)

		if rrView.Code != http.StatusOK {
			t.Fatalf("Expected 200 OK for PDF View, got %d", rrView.Code)
		}
	})

	t.Run("2. Test DOCX to HTML", func(t *testing.T) {
		// A DOCX is a zip archive; a minimal word/document.xml with one
		// text run is enough for the converter.
		buf := new(bytes.Buffer)
		zipWriter := zip.NewWriter(buf)
		docWriter, _ := zipWriter.Create("word/document.xml")
		docWriter.Write([]byte(`<w:document><w:body><w:p><w:r><w:t>Cross-Site Scripting</w:t></w:r></w:p></w:body></w:document>`))
		zipWriter.Close()

		body := new(bytes.Buffer)
		writer := multipart.NewWriter(body)
		part, _ := writer.CreateFormFile("file", "fake_pentest.docx")
		part.Write(buf.Bytes())
		writer.Close()

		reqUp := httptest.NewRequest(http.MethodPost, "/api/reports/upload", body)
		reqUp.Header.Set("Content-Type", writer.FormDataContentType())
		rrUp := httptest.NewRecorder()
		h.HandleUploadReport(rrUp, reqUp)

		reqView := httptest.NewRequest(http.MethodGet, "/api/reports/view/fake_pentest.docx", nil)
		reqView.SetPathValue("id", "fake_pentest.docx")
		rrView := httptest.NewRecorder()
		h.HandleViewReport(rrView, reqView)

		// The paragraph text must survive the DOCX-to-HTML conversion.
		if !strings.Contains(rrView.Body.String(), "Cross-Site Scripting") {
			t.Errorf("DOCX-to-HTML failed. Body: %s", rrView.Body.String())
		}
	})
}
|
||||
|
||||
func TestDraftQueueLifecycle(t *testing.T) {
|
||||
h, db := setupTestReport(t)
|
||||
defer db.Close()
|
||||
|
||||
reportID := "report-uuid-123.pdf"
|
||||
|
||||
// Save Draft
|
||||
draftPayload := []byte(`{"title": "SQLi", "severity": "High", "description": "Page 4"}`)
|
||||
reqPost := httptest.NewRequest(http.MethodPost, "/api/drafts/report/"+reportID, bytes.NewBuffer(draftPayload))
|
||||
reqPost.SetPathValue("id", reportID)
|
||||
rrPost := httptest.NewRecorder()
|
||||
h.HandleSaveDraft(rrPost, reqPost)
|
||||
|
||||
if rrPost.Code >= 400 {
|
||||
t.Fatalf("Failed to save draft! HTTP Code: %d, Error: %s", rrPost.Code, rrPost.Body.String())
|
||||
}
|
||||
|
||||
reqGet := httptest.NewRequest(http.MethodGet, "/api/drafts/report/"+reportID, nil)
|
||||
reqGet.SetPathValue("id", reportID)
|
||||
rrGet := httptest.NewRecorder()
|
||||
h.HandleGetDrafts(rrGet, reqGet)
|
||||
|
||||
var drafts []domain.DraftTicket
|
||||
json.NewDecoder(rrGet.Body).Decode(&drafts)
|
||||
if len(drafts) != 1 || drafts[0].Title != "SQLi" {
|
||||
t.Fatalf("Draft GET mismatch")
|
||||
}
|
||||
|
||||
// Delete Draft
|
||||
reqDel := httptest.NewRequest(http.MethodDelete, "/api/drafts/1", nil)
|
||||
reqDel.SetPathValue("draft_id", "1")
|
||||
rrDel := httptest.NewRecorder()
|
||||
h.HandleDeleteDraft(rrDel, reqDel)
|
||||
}
|
||||
BIN
pkg/report/testdata/fake_pentest.docx
vendored
Normal file
BIN
pkg/report/testdata/fake_pentest.docx
vendored
Normal file
Binary file not shown.
1
pkg/report/testdata/test_report.pdf
vendored
Normal file
1
pkg/report/testdata/test_report.pdf
vendored
Normal file
@@ -0,0 +1 @@
|
||||
%PDF-1.4 Fake PDF Content
|
||||
Reference in New Issue
Block a user