// Package handler — request-list upload and management handlers.
package handler
|
|
|
|
import (
	"bufio"
	"bytes"
	"encoding/csv"
	"fmt"
	"io"
	"net/http"
	"sort"
	"strings"

	"dealroom/internal/rbac"
	"dealroom/templates"

	"github.com/xuri/excelize/v2"
)
|
|
|
|
func (h *Handler) handleRequestList(w http.ResponseWriter, r *http.Request) {
|
|
profile := getProfile(r.Context())
|
|
deals := h.getDeals(profile)
|
|
|
|
// Get all requests grouped by deal
|
|
dealRequests := make(map[string][]*templates.RequestsByGroup)
|
|
for _, deal := range deals {
|
|
reqs := h.getRequests(deal.ID, profile)
|
|
// Group by buyer_group
|
|
groups := make(map[string][]*templates.RequestItem)
|
|
for _, req := range reqs {
|
|
group := req.BuyerGroup
|
|
if group == "" {
|
|
group = "Unassigned"
|
|
}
|
|
groups[group] = append(groups[group], &templates.RequestItem{
|
|
ID: req.ID,
|
|
ItemNumber: req.ItemNumber,
|
|
Section: req.Section,
|
|
Description: req.Description,
|
|
Priority: req.Priority,
|
|
AtlasStatus: req.AtlasStatus,
|
|
AtlasNote: req.AtlasNote,
|
|
Confidence: req.Confidence,
|
|
BuyerComment: req.BuyerComment,
|
|
SellerComment: req.SellerComment,
|
|
BuyerGroup: req.BuyerGroup,
|
|
})
|
|
}
|
|
var groupList []*templates.RequestsByGroup
|
|
for name, items := range groups {
|
|
groupList = append(groupList, &templates.RequestsByGroup{Name: name, Requests: items})
|
|
}
|
|
dealRequests[deal.ID] = groupList
|
|
}
|
|
|
|
templates.RequestListPage(profile, deals, dealRequests).Render(r.Context(), w)
|
|
}
|
|
|
|
func (h *Handler) handleRequestListUpload(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
profile := getProfile(r.Context())
|
|
err := r.ParseMultipartForm(10 << 20) // 10MB
|
|
if err != nil {
|
|
http.Error(w, "Error parsing form", 400)
|
|
return
|
|
}
|
|
|
|
dealID := r.FormValue("deal_id")
|
|
targetGroup := r.FormValue("target_group") // "all" or specific group name
|
|
uploadMode := r.FormValue("upload_mode") // "replace", "add", "group_specific"
|
|
convertFolders := r.FormValue("convert_folders") // "yes" or "no"
|
|
|
|
if dealID == "" {
|
|
http.Error(w, "Deal ID required", 400)
|
|
return
|
|
}
|
|
|
|
file, header, err := r.FormFile("request_list")
|
|
if err != nil {
|
|
http.Error(w, "File is required", 400)
|
|
return
|
|
}
|
|
defer file.Close()
|
|
|
|
// Read entire file into memory so we can detect type and re-read if needed
|
|
raw, err := io.ReadAll(file)
|
|
if err != nil {
|
|
http.Error(w, "Error reading file", 500)
|
|
return
|
|
}
|
|
|
|
// Detect XLSX by filename extension or magic bytes (PK = zip/xlsx)
|
|
fname := strings.ToLower(header.Filename)
|
|
isXLSX := strings.HasSuffix(fname, ".xlsx") || strings.HasSuffix(fname, ".xls") ||
|
|
(len(raw) >= 2 && raw[0] == 'P' && raw[1] == 'K')
|
|
|
|
type reqRow struct {
|
|
section, itemNumber, description, priority string
|
|
}
|
|
var rows [][]string
|
|
|
|
if isXLSX {
|
|
// Parse XLSX with excelize
|
|
xf, err := excelize.OpenReader(bytes.NewReader(raw))
|
|
if err != nil {
|
|
http.Error(w, "Error parsing XLSX: "+err.Error(), 400)
|
|
return
|
|
}
|
|
sheetName := xf.GetSheetName(0)
|
|
xlRows, err := xf.GetRows(sheetName)
|
|
if err != nil {
|
|
http.Error(w, "Error reading sheet: "+err.Error(), 400)
|
|
return
|
|
}
|
|
rows = xlRows
|
|
} else {
|
|
// Parse CSV
|
|
reader := csv.NewReader(bufio.NewReader(bytes.NewReader(raw)))
|
|
reader.FieldsPerRecord = -1
|
|
reader.TrimLeadingSpace = true
|
|
csvRows, err := reader.ReadAll()
|
|
if err != nil {
|
|
http.Error(w, "Error parsing CSV: "+err.Error(), 400)
|
|
return
|
|
}
|
|
rows = csvRows
|
|
}
|
|
|
|
// Detect column indices from header row using common DD checklist naming conventions
|
|
// Falls back to positional (col 0=section, 1=item#, 2=description, 3=priority)
|
|
idxSection := 0
|
|
idxItem := 1
|
|
idxDesc := 2
|
|
idxPriority := -1
|
|
|
|
if len(rows) > 0 {
|
|
for ci, cell := range rows[0] {
|
|
h := strings.ToLower(strings.TrimSpace(cell))
|
|
switch {
|
|
case contains(h, "section", "category", "topic", "area", "phase", "workstream"):
|
|
idxSection = ci
|
|
case contains(h, "item #", "item#", "item no", "no.", "ref", "number", "#"):
|
|
idxItem = ci
|
|
case contains(h, "description", "request", "document", "information", "detail", "item") && ci != idxSection:
|
|
idxDesc = ci
|
|
case contains(h, "priority", "urgency", "importance", "criticality"):
|
|
idxPriority = ci
|
|
}
|
|
}
|
|
}
|
|
|
|
var items []reqRow
|
|
for ri, record := range rows {
|
|
if ri == 0 {
|
|
continue // skip header
|
|
}
|
|
if len(record) == 0 {
|
|
continue
|
|
}
|
|
// Skip blank rows
|
|
allBlank := true
|
|
for _, c := range record {
|
|
if strings.TrimSpace(c) != "" {
|
|
allBlank = false
|
|
break
|
|
}
|
|
}
|
|
if allBlank {
|
|
continue
|
|
}
|
|
|
|
get := func(idx int) string {
|
|
if idx >= 0 && idx < len(record) {
|
|
return strings.TrimSpace(record[idx])
|
|
}
|
|
return ""
|
|
}
|
|
|
|
desc := get(idxDesc)
|
|
if desc == "" {
|
|
continue // must have a description to be useful
|
|
}
|
|
|
|
priority := "medium"
|
|
if idxPriority >= 0 {
|
|
p := strings.ToLower(get(idxPriority))
|
|
switch {
|
|
case strings.Contains(p, "high") || strings.Contains(p, "critical") || strings.Contains(p, "urgent"):
|
|
priority = "high"
|
|
case strings.Contains(p, "low") || strings.Contains(p, "nice") || strings.Contains(p, "optional"):
|
|
priority = "low"
|
|
}
|
|
}
|
|
|
|
items = append(items, reqRow{
|
|
section: get(idxSection),
|
|
itemNumber: get(idxItem),
|
|
description: desc,
|
|
priority: priority,
|
|
})
|
|
}
|
|
|
|
if len(items) == 0 {
|
|
http.Error(w, "No valid items found in file — check that the sheet has a header row and a description column", 400)
|
|
return
|
|
}
|
|
|
|
// Handle upload mode
|
|
if uploadMode == "replace" {
|
|
if targetGroup == "all" || targetGroup == "" {
|
|
h.db.Exec("DELETE FROM diligence_requests WHERE deal_id = ?", dealID)
|
|
} else {
|
|
h.db.Exec("DELETE FROM diligence_requests WHERE deal_id = ? AND buyer_group = ?", dealID, targetGroup)
|
|
}
|
|
}
|
|
|
|
buyerGroup := ""
|
|
if targetGroup != "all" && targetGroup != "" {
|
|
buyerGroup = targetGroup
|
|
}
|
|
isBuyerSpecific := 0
|
|
if uploadMode == "group_specific" && buyerGroup != "" {
|
|
isBuyerSpecific = 1
|
|
}
|
|
|
|
// Insert request items
|
|
for _, item := range items {
|
|
id := generateID("req")
|
|
h.db.Exec(`INSERT INTO diligence_requests (id, deal_id, item_number, section, description, priority, buyer_group, is_buyer_specific, visible_to_buyer_group, created_by) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
|
id, dealID, item.itemNumber, item.section, item.description, item.priority, buyerGroup, isBuyerSpecific, buyerGroup, profile.ID)
|
|
}
|
|
|
|
// Optionally convert folder structure
|
|
if convertFolders == "yes" {
|
|
// Create folders from unique sections
|
|
sections := make(map[string]bool)
|
|
for _, item := range items {
|
|
sections[item.section] = true
|
|
}
|
|
for section := range sections {
|
|
var existing int
|
|
h.db.QueryRow("SELECT COUNT(*) FROM folders WHERE deal_id = ? AND name = ?", dealID, section).Scan(&existing)
|
|
if existing == 0 {
|
|
folderID := generateID("folder")
|
|
h.db.Exec("INSERT INTO folders (id, deal_id, parent_id, name, created_by) VALUES (?, ?, '', ?, ?)",
|
|
folderID, dealID, section, profile.ID)
|
|
}
|
|
}
|
|
}
|
|
|
|
// Auto-assign existing files to matching requests
|
|
h.autoAssignFilesToRequests(dealID)
|
|
|
|
h.logActivity(dealID, profile.ID, profile.OrganizationID, "upload", "request_list", fmt.Sprintf("%d items", len(items)), "")
|
|
|
|
http.Redirect(w, r, "/deals/"+dealID, http.StatusSeeOther)
|
|
}
|
|
|
|
func (h *Handler) autoAssignFilesToRequests(dealID string) {
|
|
// Get all unlinked requests
|
|
rows, err := h.db.Query("SELECT id, description, section FROM diligence_requests WHERE deal_id = ? AND (linked_file_ids = '' OR linked_file_ids IS NULL)", dealID)
|
|
if err != nil {
|
|
return
|
|
}
|
|
defer rows.Close()
|
|
|
|
type reqInfo struct {
|
|
id, description, section string
|
|
}
|
|
var reqs []reqInfo
|
|
for rows.Next() {
|
|
var r reqInfo
|
|
rows.Scan(&r.id, &r.description, &r.section)
|
|
reqs = append(reqs, r)
|
|
}
|
|
|
|
// Get all files
|
|
files, err := h.db.Query("SELECT id, name FROM files WHERE deal_id = ?", dealID)
|
|
if err != nil {
|
|
return
|
|
}
|
|
defer files.Close()
|
|
|
|
type fileInfo struct {
|
|
id, name string
|
|
}
|
|
var fileList []fileInfo
|
|
for files.Next() {
|
|
var f fileInfo
|
|
files.Scan(&f.id, &f.name)
|
|
fileList = append(fileList, f)
|
|
}
|
|
|
|
// Simple keyword matching
|
|
for _, req := range reqs {
|
|
words := strings.Fields(strings.ToLower(req.description))
|
|
for _, f := range fileList {
|
|
fname := strings.ToLower(f.name)
|
|
matchCount := 0
|
|
for _, w := range words {
|
|
if len(w) > 3 && strings.Contains(fname, w) {
|
|
matchCount++
|
|
}
|
|
}
|
|
if matchCount >= 2 {
|
|
h.db.Exec("UPDATE diligence_requests SET linked_file_ids = ? WHERE id = ?", f.id, req.id)
|
|
break
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// contains reports whether s contains at least one of the given substrings.
// With no substrings it returns false; an empty substring always matches.
func contains(s string, subs ...string) bool {
	found := false
	for i := 0; i < len(subs) && !found; i++ {
		found = strings.Contains(s, subs[i])
	}
	return found
}
|
|
|
|
func (h *Handler) handleUpdateComment(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
profile := getProfile(r.Context())
|
|
reqID := r.FormValue("request_id")
|
|
value := r.FormValue("value")
|
|
|
|
field := "seller_comment"
|
|
if rbac.EffectiveIsBuyer(profile) {
|
|
field = "buyer_comment"
|
|
}
|
|
|
|
h.db.Exec("UPDATE diligence_requests SET "+field+" = ?, updated_at = datetime('now') WHERE id = ?", value, reqID)
|
|
|
|
w.Header().Set("Content-Type", "text/html")
|
|
w.Write([]byte(`<span class="text-xs text-green-400">✓ Saved</span>`))
|
|
}
|