// Package handler implements the HTTP handlers for the deal room application.
package handler
import (
	"bufio"
	"bytes"
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"dealroom/internal/rbac"
	"dealroom/templates"

	"github.com/xuri/excelize/v2"
)
func (h *Handler) handleRequestList(w http.ResponseWriter, r *http.Request) {
|
|
profile := getProfile(r.Context())
|
|
deals := h.getDeals(profile)
|
|
|
|
// Get all requests grouped by deal
|
|
dealRequests := make(map[string][]*templates.RequestsByGroup)
|
|
for _, deal := range deals {
|
|
reqs := h.getRequests(deal.ID, profile)
|
|
// Group by buyer_group
|
|
groups := make(map[string][]*templates.RequestItem)
|
|
for _, req := range reqs {
|
|
group := req.BuyerGroup
|
|
if group == "" {
|
|
group = "Unassigned"
|
|
}
|
|
groups[group] = append(groups[group], &templates.RequestItem{
|
|
ID: req.ID,
|
|
ItemNumber: req.ItemNumber,
|
|
Section: req.Section,
|
|
Description: req.Description,
|
|
Priority: req.Priority,
|
|
AtlasStatus: req.AtlasStatus,
|
|
AtlasNote: req.AtlasNote,
|
|
Confidence: req.Confidence,
|
|
BuyerComment: req.BuyerComment,
|
|
SellerComment: req.SellerComment,
|
|
BuyerGroup: req.BuyerGroup,
|
|
})
|
|
}
|
|
var groupList []*templates.RequestsByGroup
|
|
for name, items := range groups {
|
|
groupList = append(groupList, &templates.RequestsByGroup{Name: name, Requests: items})
|
|
}
|
|
dealRequests[deal.ID] = groupList
|
|
}
|
|
|
|
templates.RequestListPage(profile, deals, dealRequests).Render(r.Context(), w)
|
|
}
func (h *Handler) handleRequestListUpload(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
profile := getProfile(r.Context())
|
|
err := r.ParseMultipartForm(10 << 20) // 10MB
|
|
if err != nil {
|
|
http.Error(w, "Error parsing form", 400)
|
|
return
|
|
}
|
|
|
|
dealID := r.FormValue("deal_id")
|
|
targetGroup := r.FormValue("target_group") // "all" or specific group name
|
|
uploadMode := r.FormValue("upload_mode") // "replace", "add", "group_specific"
|
|
convertFolders := r.FormValue("convert_folders") // "yes" or "no"
|
|
|
|
if dealID == "" {
|
|
http.Error(w, "Deal ID required", 400)
|
|
return
|
|
}
|
|
|
|
file, header, err := r.FormFile("request_list")
|
|
if err != nil {
|
|
http.Error(w, "File is required", 400)
|
|
return
|
|
}
|
|
defer file.Close()
|
|
|
|
// Read entire file into memory so we can detect type and re-read if needed
|
|
raw, err := io.ReadAll(file)
|
|
if err != nil {
|
|
http.Error(w, "Error reading file", 500)
|
|
return
|
|
}
|
|
|
|
// Save uploaded file to data/uploads/ for inspection and reprocessing
|
|
uploadsDir := "data/uploads"
|
|
os.MkdirAll(uploadsDir, 0755)
|
|
stamp := time.Now().Format("20060102-150405")
|
|
saveName := filepath.Join(uploadsDir, stamp+"-"+dealID+"-"+header.Filename)
|
|
if err := os.WriteFile(saveName, raw, 0644); err != nil {
|
|
log.Printf("Warning: could not save uploaded file: %v", err)
|
|
} else {
|
|
log.Printf("Saved upload: %s (%d bytes)", saveName, len(raw))
|
|
}
|
|
|
|
// Detect XLSX by filename extension or magic bytes (PK = zip/xlsx)
|
|
fname := strings.ToLower(header.Filename)
|
|
isXLSX := strings.HasSuffix(fname, ".xlsx") || strings.HasSuffix(fname, ".xls") ||
|
|
(len(raw) >= 2 && raw[0] == 'P' && raw[1] == 'K')
|
|
|
|
type reqRow struct {
|
|
section, itemNumber, description, priority string
|
|
}
|
|
var rows [][]string
|
|
|
|
if isXLSX {
|
|
// Parse XLSX with excelize
|
|
xf, err := excelize.OpenReader(bytes.NewReader(raw))
|
|
if err != nil {
|
|
http.Error(w, "Error parsing XLSX: "+err.Error(), 400)
|
|
return
|
|
}
|
|
sheetName := xf.GetSheetName(0)
|
|
xlRows, err := xf.GetRows(sheetName)
|
|
if err != nil {
|
|
http.Error(w, "Error reading sheet: "+err.Error(), 400)
|
|
return
|
|
}
|
|
rows = xlRows
|
|
} else {
|
|
// Parse CSV
|
|
reader := csv.NewReader(bufio.NewReader(bytes.NewReader(raw)))
|
|
reader.FieldsPerRecord = -1
|
|
reader.TrimLeadingSpace = true
|
|
csvRows, err := reader.ReadAll()
|
|
if err != nil {
|
|
http.Error(w, "Error parsing CSV: "+err.Error(), 400)
|
|
return
|
|
}
|
|
rows = csvRows
|
|
}
|
|
|
|
// Log first 12 rows of the file for debugging
|
|
for ri, row := range rows {
|
|
if ri >= 12 {
|
|
break
|
|
}
|
|
log.Printf("[upload-debug] row %d: %v\n", ri, row)
|
|
}
|
|
|
|
// Scan up to first 12 rows to find the actual header row (highest keyword score).
|
|
// Many DD checklists have title/metadata rows before the real column headers.
|
|
idxSection := -1
|
|
idxItem := -1
|
|
idxDesc := -1
|
|
idxPriority := -1
|
|
headerRowIdx := 0
|
|
bestScore := 0
|
|
|
|
for ri, record := range rows {
|
|
if ri >= 12 {
|
|
break
|
|
}
|
|
score := 0
|
|
tmpSection, tmpItem, tmpDesc, tmpPri := -1, -1, -1, -1
|
|
for ci, cell := range record {
|
|
h := strings.ToLower(strings.TrimSpace(cell))
|
|
if h == "" {
|
|
continue
|
|
}
|
|
if contains(h, "section", "category", "topic", "area", "phase", "workstream") {
|
|
tmpSection = ci
|
|
score += 3
|
|
} else if contains(h, "description", "request", "document", "information requested", "detail") {
|
|
tmpDesc = ci
|
|
score += 3
|
|
} else if contains(h, "priority", "urgency", "importance", "criticality") {
|
|
tmpPri = ci
|
|
score += 2
|
|
} else if h == "#" || h == "no." || h == "no" || h == "item #" || h == "item#" ||
|
|
contains(h, "item no", "ref no", "ref #") {
|
|
tmpItem = ci
|
|
score += 2
|
|
}
|
|
}
|
|
if score > bestScore {
|
|
bestScore = score
|
|
headerRowIdx = ri
|
|
if tmpSection >= 0 {
|
|
idxSection = tmpSection
|
|
}
|
|
if tmpItem >= 0 {
|
|
idxItem = tmpItem
|
|
}
|
|
if tmpDesc >= 0 {
|
|
idxDesc = tmpDesc
|
|
}
|
|
if tmpPri >= 0 {
|
|
idxPriority = tmpPri
|
|
}
|
|
}
|
|
}
|
|
|
|
// If no header found, fall back to positional
|
|
if bestScore < 2 {
|
|
headerRowIdx = 0
|
|
idxSection = 0
|
|
idxItem = 1
|
|
idxDesc = 2
|
|
}
|
|
|
|
// If desc still not found, guess: pick the column with the longest average text
|
|
if idxDesc < 0 && len(rows) > headerRowIdx+1 {
|
|
maxLen := 0
|
|
for ci := range rows[headerRowIdx] {
|
|
total := 0
|
|
count := 0
|
|
for ri := headerRowIdx + 1; ri < len(rows) && ri < headerRowIdx+20; ri++ {
|
|
if ci < len(rows[ri]) {
|
|
total += len(strings.TrimSpace(rows[ri][ci]))
|
|
count++
|
|
}
|
|
}
|
|
avg := 0
|
|
if count > 0 {
|
|
avg = total / count
|
|
}
|
|
if avg > maxLen && ci != idxSection && ci != idxItem {
|
|
maxLen = avg
|
|
idxDesc = ci
|
|
}
|
|
}
|
|
}
|
|
|
|
log.Printf("[upload-debug] header at row %d (score=%d) | section=%d item=%d desc=%d priority=%d\n",
|
|
headerRowIdx, bestScore, idxSection, idxItem, idxDesc, idxPriority)
|
|
|
|
var items []reqRow
|
|
for ri, record := range rows {
|
|
if ri <= headerRowIdx {
|
|
continue // skip title rows + header row itself
|
|
}
|
|
if len(record) == 0 {
|
|
continue
|
|
}
|
|
// Skip blank rows
|
|
allBlank := true
|
|
for _, c := range record {
|
|
if strings.TrimSpace(c) != "" {
|
|
allBlank = false
|
|
break
|
|
}
|
|
}
|
|
if allBlank {
|
|
continue
|
|
}
|
|
|
|
get := func(idx int) string {
|
|
if idx >= 0 && idx < len(record) {
|
|
return strings.TrimSpace(record[idx])
|
|
}
|
|
return ""
|
|
}
|
|
|
|
desc := get(idxDesc)
|
|
if desc == "" {
|
|
continue // must have a description to be useful
|
|
}
|
|
|
|
priority := "medium"
|
|
if idxPriority >= 0 {
|
|
p := strings.ToLower(get(idxPriority))
|
|
switch {
|
|
case strings.Contains(p, "high") || strings.Contains(p, "critical") || strings.Contains(p, "urgent"):
|
|
priority = "high"
|
|
case strings.Contains(p, "low") || strings.Contains(p, "nice") || strings.Contains(p, "optional"):
|
|
priority = "low"
|
|
}
|
|
}
|
|
|
|
items = append(items, reqRow{
|
|
section: get(idxSection),
|
|
itemNumber: get(idxItem),
|
|
description: desc,
|
|
priority: priority,
|
|
})
|
|
}
|
|
|
|
if len(items) == 0 {
|
|
http.Error(w, "No valid items found in file — check that the sheet has a header row and a description column", 400)
|
|
return
|
|
}
|
|
|
|
// Handle upload mode
|
|
if uploadMode == "replace" {
|
|
if targetGroup == "all" || targetGroup == "" {
|
|
h.db.Exec("DELETE FROM diligence_requests WHERE deal_id = ?", dealID)
|
|
} else {
|
|
h.db.Exec("DELETE FROM diligence_requests WHERE deal_id = ? AND buyer_group = ?", dealID, targetGroup)
|
|
}
|
|
}
|
|
|
|
buyerGroup := ""
|
|
if targetGroup != "all" && targetGroup != "" {
|
|
buyerGroup = targetGroup
|
|
}
|
|
isBuyerSpecific := 0
|
|
if uploadMode == "group_specific" && buyerGroup != "" {
|
|
isBuyerSpecific = 1
|
|
}
|
|
|
|
// Insert request items
|
|
for _, item := range items {
|
|
id := generateID("req")
|
|
h.db.Exec(`INSERT INTO diligence_requests (id, deal_id, item_number, section, description, priority, buyer_group, is_buyer_specific, visible_to_buyer_group, created_by) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
|
id, dealID, item.itemNumber, item.section, item.description, item.priority, buyerGroup, isBuyerSpecific, buyerGroup, profile.ID)
|
|
}
|
|
|
|
// Optionally convert folder structure
|
|
if convertFolders == "yes" {
|
|
// Create folders from unique sections
|
|
sections := make(map[string]bool)
|
|
for _, item := range items {
|
|
sections[item.section] = true
|
|
}
|
|
for section := range sections {
|
|
var existing int
|
|
h.db.QueryRow("SELECT COUNT(*) FROM folders WHERE deal_id = ? AND name = ?", dealID, section).Scan(&existing)
|
|
if existing == 0 {
|
|
folderID := generateID("folder")
|
|
h.db.Exec("INSERT INTO folders (id, deal_id, parent_id, name, created_by) VALUES (?, ?, '', ?, ?)",
|
|
folderID, dealID, section, profile.ID)
|
|
}
|
|
}
|
|
}
|
|
|
|
// Auto-assign existing files to matching requests
|
|
h.autoAssignFilesToRequests(dealID)
|
|
|
|
// Auto-assign by keyword rules
|
|
h.autoAssignByRules(dealID)
|
|
|
|
h.logActivity(dealID, profile.ID, profile.OrganizationID, "upload", "request_list", fmt.Sprintf("%d items", len(items)), "")
|
|
|
|
http.Redirect(w, r, "/deals/"+dealID+"?tab=requests", http.StatusSeeOther)
|
|
}
func (h *Handler) autoAssignFilesToRequests(dealID string) {
|
|
// Get all unlinked requests
|
|
rows, err := h.db.Query("SELECT id, description, section FROM diligence_requests WHERE deal_id = ? AND (linked_file_ids = '' OR linked_file_ids IS NULL)", dealID)
|
|
if err != nil {
|
|
return
|
|
}
|
|
defer rows.Close()
|
|
|
|
type reqInfo struct {
|
|
id, description, section string
|
|
}
|
|
var reqs []reqInfo
|
|
for rows.Next() {
|
|
var r reqInfo
|
|
rows.Scan(&r.id, &r.description, &r.section)
|
|
reqs = append(reqs, r)
|
|
}
|
|
|
|
// Get all files
|
|
files, err := h.db.Query("SELECT id, name FROM files WHERE deal_id = ?", dealID)
|
|
if err != nil {
|
|
return
|
|
}
|
|
defer files.Close()
|
|
|
|
type fileInfo struct {
|
|
id, name string
|
|
}
|
|
var fileList []fileInfo
|
|
for files.Next() {
|
|
var f fileInfo
|
|
files.Scan(&f.id, &f.name)
|
|
fileList = append(fileList, f)
|
|
}
|
|
|
|
// Simple keyword matching
|
|
for _, req := range reqs {
|
|
words := strings.Fields(strings.ToLower(req.description))
|
|
for _, f := range fileList {
|
|
fname := strings.ToLower(f.name)
|
|
matchCount := 0
|
|
for _, w := range words {
|
|
if len(w) > 3 && strings.Contains(fname, w) {
|
|
matchCount++
|
|
}
|
|
}
|
|
if matchCount >= 2 {
|
|
h.db.Exec("UPDATE diligence_requests SET linked_file_ids = ? WHERE id = ?", f.id, req.id)
|
|
break
|
|
}
|
|
}
|
|
}
|
|
}
// contains reports whether s contains at least one of the given substrings.
// With no substrings it returns false.
func contains(s string, subs ...string) bool {
	matched := false
	for i := 0; !matched && i < len(subs); i++ {
		matched = strings.Contains(s, subs[i])
	}
	return matched
}
func (h *Handler) handleUpdateComment(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
profile := getProfile(r.Context())
|
|
reqID := r.FormValue("request_id")
|
|
value := r.FormValue("value")
|
|
|
|
field := "seller_comment"
|
|
if rbac.EffectiveIsBuyer(profile) {
|
|
field = "buyer_comment"
|
|
}
|
|
|
|
h.db.Exec("UPDATE diligence_requests SET "+field+" = ?, updated_at = datetime('now') WHERE id = ?", value, reqID)
|
|
|
|
w.Header().Set("Content-Type", "text/html")
|
|
w.Write([]byte(`<span class="text-xs text-green-400">✓ Saved</span>`))
|
|
}