Portal: upload handler, dossier rework, dashboard updates, normalize fixes

- portal/upload.go: new file upload handler (360 lines)
- portal/templates/dossier.tmpl: major rework (469 changes)
- portal/templates/upload.tmpl, dashboard.tmpl: UI updates
- lib/normalize.go, llm.go, config.go: library fixes
- portal/dossier_sections.go, main.go: portal logic
- portal/lang/en.yaml: string updates
- cmd/populate-search-key: search key population tool
- Makefile, style.css: build and style updates
This commit is contained in:
James 2026-02-25 20:01:11 -05:00
parent cc1dd7690c
commit 033d558535
45 changed files with 2089 additions and 210 deletions

View File

@ -120,6 +120,7 @@ deploy-prod: check-db all $(BINDIR)/decrypt $(BINDIR)/fips-check
rsync -avz --delete portal/templates/ $(PROD_HOST):$(DEPLOY_DIR)/templates/
rsync -avz portal/static/ $(PROD_HOST):$(DEPLOY_DIR)/static/
rsync -avz portal/lang/ $(PROD_HOST):$(DEPLOY_DIR)/lang/
rsync -avz api/tracker_prompts/ $(PROD_HOST):$(DEPLOY_DIR)/tracker_prompts/
ssh $(PROD_HOST) "$(DEPLOY_DIR)/start.sh"
@echo ""
ssh $(PROD_HOST) "$(DEPLOY_DIR)/status.sh"

View File

@ -0,0 +1,13 @@
Extract clinical assessments and examination findings from this medical document. Return a JSON array or null.
Each entry:
- type: "screening", "examination", "developmental"
- value: (empty)
- summary: assessment name or description, e.g. "Neurological examination"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"instrument": "...", "findings": "...", "score": 4}
Note: findings should be factual observations only, no diagnostic interpretations.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract birth and perinatal details from this medical document. Return a JSON array or null.
Each entry:
- type: "delivery"
- value: (empty)
- summary: date + type, e.g. "Feb 26, 2020, premature 26 SSW"
- timestamp: "YYYY-MM-DD" birth date
- data: {"type": "premature/vaginal/cesarean", "gestational_weeks": 26, "weight_g": 3200, "apgar_1": 8, "apgar_5": 9, "complications": "..."}
Include only fields present in the document.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,11 @@
Extract consultation/visit records from this medical document. Return a JSON array or null.
Each entry:
- type: visit subtype ("visit", "referral", "follow_up", "letter")
- value: (empty)
- summary: provider + date, e.g. "Prof. Dr. Péraud, Aug 2022"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"provider": "...", "specialty": "...", "location": "...", "reason": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract medical devices and implants from this medical document. Return a JSON array or null.
Each entry:
- type: "implant", "external", "prosthetic"
- value: (empty)
- summary: device name, e.g. "Medtronic Micra AV"
- timestamp: "YYYY-MM-DD" implant/start date if mentioned
- data: {"device": "...", "manufacturer": "...", "model": "...", "serial": "...", "settings": {"pressure": "5 cmH₂O"}, "location": "..."}
Extract each distinct device as a separate entry. Include current settings if documented.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,16 @@
Extract diagnoses and medical conditions from this medical document. Return a JSON array or null.
Each entry:
- type: status ("active", "resolved", "historical")
- value: (empty)
- summary: condition name, e.g. "Posthämorrhagischer Hydrocephalus"
- search_key: ICD-10 code lowercase if available, e.g. "g91.1"
- timestamp: "YYYY-MM-DD" if diagnosis date mentioned
- data: {"icd10": "...", "status": "active/resolved", "source": "document"}
Only extract DISEASES and CONDITIONS — not procedures.
"Z. n. [procedure]" (status post procedure) belongs in surgical history, not here.
Keep the original language of the condition name.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,16 @@
Extract PHYSICAL EXERCISE and SPORTS activities from this medical document. Return a JSON array or null.
This is ONLY for voluntary physical activity: gym, running, swimming, cycling, hiking, yoga, team sports, etc.
Do NOT extract therapeutic treatments here (physical therapy, Feldenkrais, occupational therapy, speech therapy) — those belong in therapies.
If the document does not mention any exercise or sports activities, return null.
Each entry:
- type: "activity"
- value: measurement if available, e.g. "5.2"
- summary: activity + details, e.g. "5.2 km run, 30 min"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"activity": "...", "distance_km": 5.2, "duration_min": 30}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,16 @@
Extract FAMILY medical history from this medical document. Return a JSON array or null.
This is ONLY for hereditary/genetic conditions in BLOOD RELATIVES (parents, siblings, grandparents, aunts, uncles, cousins). NOT the patient's own conditions — those belong in other categories.
Look for phrases like: "family history of...", "mother/father had...", "runs in the family", "familial", "hereditary".
If the document does not mention any family member's health conditions, return null.
Each entry:
- type: relation ("mother", "father", "sibling", "grandparent", "aunt", "uncle", "cousin", "other")
- value: (empty)
- summary: relation + condition, e.g. "Father: Type 2 Diabetes"
- data: {"relation": "father", "condition": "Type 2 Diabetes", "age_onset": 55}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,17 @@
Extract FERTILITY and REPRODUCTIVE HEALTH data from this medical document. Return a JSON array or null.
This is ONLY for: menstrual cycles, ovulation tracking, IVF/IUI treatments, sperm analysis, hormone levels related to reproduction, fertility medications, miscarriages, pregnancy complications.
Do NOT extract birth/delivery here — that belongs in birth category.
Do NOT extract general pregnancy mentions unless they involve fertility treatment or complications.
If the document does not mention any fertility or reproductive health data, return null.
Each entry:
- type: "cycle", "treatment", "observation"
- value: (empty)
- summary: description, e.g. "IVF cycle 2, Feb 2025"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"description": "...", "details": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,18 @@
Extract LIFE and MEDICAL HISTORY events from this medical document. Return a JSON array or null.
This is for biographical and perinatal events: premature birth, NICU stays, developmental milestones, accidents, major life events that affected health.
Do NOT extract diseases/conditions here — those belong in diagnoses.
Do NOT extract surgeries here — those belong in procedures.
Do NOT extract hospital admissions — those belong in hospitalizations.
If the document does not mention any historical life events, return null.
Each entry:
- type: "perinatal", "developmental", "accident", "event"
- value: (empty)
- summary: event + time, e.g. "Premature birth at 26 weeks, Feb 2020"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"event": "...", "age_at_event": "...", "details": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,11 @@
Extract hospitalizations from this medical document. Return a JSON array or null.
Each entry:
- type: "admission", "emergency", "day_case"
- value: (empty)
- summary: reason + facility, e.g. "Shunt revision, Universitätsklinikum Ulm"
- timestamp: "YYYY-MM-DD" admission date if mentioned
- data: {"reason": "...", "facility": "...", "discharge": "YYYY-MM-DD", "duration_days": 5}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract imaging study references from this medical document. Return a JSON array or null.
Each entry:
- type: modality lowercase ("mri", "ct", "xray", "ultrasound")
- value: (empty)
- summary: body part + modality + date, e.g. "Brain MRI (Aug 2022)"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"body_part": "...", "modality": "...", "institution": "...", "findings_summary": "brief structural description, no interpretation"}
Note: findings_summary is factual anatomy only ("enlarged ventricles", "3cm mass in left lobe"). NO diagnostic opinions.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,15 @@
Extract laboratory test results from this medical document. Return a JSON array or null.
Each entry:
- type: "result"
- value: numeric value as string, e.g. "14.2"
- summary: name: value unit, e.g. "Hemoglobin: 14.2 g/dL"
- search_key: test name lowercase, e.g. "hemoglobin"
- timestamp: "YYYY-MM-DD" if collection date mentioned
- data: {"test_name": "...", "numeric_value": 14.2, "unit": "g/dL"}
Do NOT include reference ranges, flags (H/L), or interpretations.
Extract every individual test result as a separate entry.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract medications from this medical document. Return a JSON array or null.
Each entry:
- type: "prescription"
- value: (empty)
- summary: med name + dose, e.g. "Metformin 500mg"
- timestamp: "YYYY-MM-DD" if start date mentioned
- data: {"medication": "...", "dosage": "...", "frequency": "...", "prescriber": "..."}
Extract each distinct medication as a separate entry.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract clinical notes and free-text observations from this medical document. Return a JSON array or null.
Each entry:
- type: "general", "progress", "clinical"
- value: (empty)
- summary: note title or first line, e.g. "Follow-up assessment"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"text": "full note text..."}
Only extract distinct notes that don't fit other categories (not diagnoses, not procedures, not vitals).
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,11 @@
Extract nutrition and diet information from this medical document. Return a JSON array or null.
Each entry:
- type: "observation", "restriction", "tolerance"
- value: (empty)
- summary: brief description, e.g. "Tolerating solid foods well"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"description": "...", "details": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract healthcare providers from this medical document. Return a JSON array or null.
Each entry:
- type: "physician", "clinic", "hospital", "therapist"
- value: (empty)
- summary: name + specialty, e.g. "Prof. Dr. Péraud, Neurosurgery"
- data: {"name": "...", "role": "...", "specialty": "...", "institution": "...", "phone": "...", "address": "..."}
Only extract providers who TREATED or REFERRED the patient.
Ignore names from letterheads, board members, administrative staff, or signatories who didn't provide care.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,11 @@
Extract supplements and vitamins from this medical document. Return a JSON array or null.
Each entry:
- type: "vitamin", "mineral", "herbal", "other"
- value: (empty)
- summary: supplement name + dosage, e.g. "Vitamin D3 2000 IU"
- timestamp: "YYYY-MM-DD" if start date mentioned
- data: {"supplement": "...", "dosage": "...", "frequency": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,17 @@
Extract SURGICAL PROCEDURES from this medical document. Return a JSON array or null.
Only extract actual operations/procedures that were PERFORMED — cutting, implanting, removing, scoping.
Do NOT extract diagnoses, conditions, or diseases here — those belong elsewhere.
Do NOT extract hospital admissions — those belong elsewhere.
Each entry:
- type: "inpatient", "outpatient", "endoscopic"
- value: (empty)
- summary: procedure + date, e.g. "VP-Shunt Implantation, Jul 2020"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"procedure": "...", "surgeon": "...", "facility": "...", "details": "..."}
Extract each distinct procedure as a separate entry. Include technique details in data.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,15 @@
Extract symptoms and complaints from this medical document. Return a JSON array or null.
Each entry:
- type: "chronic", "acute", "observation"
- value: (empty)
- summary: symptom description, e.g. "Head tilt to the right"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"symptom": "...", "severity": "...", "details": "..."}
Only extract SYMPTOMS — things the patient experiences or displays.
NOT diagnoses (those go elsewhere), NOT imaging findings, NOT test results.
A symptom is something observable: pain, difficulty walking, head tilt, irritability, fever.
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,11 @@
Extract therapies and rehabilitation from this medical document. Return a JSON array or null.
Each entry:
- type: "physical", "occupational", "speech", "behavioral", "other"
- value: (empty)
- summary: therapy + provider, e.g. "Feldenkrais therapy, 3 months"
- timestamp: "YYYY-MM-DD" start date if mentioned
- data: {"therapy": "...", "provider": "...", "frequency": "...", "duration": "...", "goal": "..."}
Document:
{{MARKDOWN}}

View File

@ -0,0 +1,13 @@
Extract vital sign measurements from this medical document. Return a JSON array or null.
Each entry:
- type: vital type ("weight", "blood_pressure", "heart_rate", "temperature", "oxygen_saturation", "height", "head_circumference")
- value: measurement as string, e.g. "75.5"
- summary: value + unit, e.g. "75.5 kg"
- timestamp: "YYYY-MM-DD" if date mentioned
- data: {"unit": "...", "numeric_value": 75.5}
For blood pressure: value "120/80", data: {"systolic": 120, "diastolic": 80, "unit": "mmHg"}
Document:
{{MARKDOWN}}

View File

@ -3,6 +3,7 @@ package main
import (
"fmt"
"log"
"os"
"inou/lib"
)
@ -11,34 +12,22 @@ func main() {
log.Fatal("lib.Init failed:", err)
}
lib.ConfigInit()
// Get all dossiers
allDossiers, err := lib.DossierList(nil, nil) // nil ctx = system, nil filter = all
if err != nil {
log.Fatal("List dossiers:", err)
}
type dossierRow struct {
DossierID string
Count int
}
var dossiers []dossierRow
for _, d := range allDossiers {
count, _ := lib.EntryCount(nil, d.DossierID, lib.CategoryLab, "")
if count > 0 {
dossiers = append(dossiers, dossierRow{DossierID: d.DossierID, Count: count})
}
if err := lib.RefDBInit("/tank/inou/data/reference.db"); err != nil {
log.Fatal("RefDBInit:", err)
}
fmt.Printf("Found %d dossiers with lab data\n", len(dossiers))
dossierID := "d31ec40f59c30cf1"
if len(os.Args) > 1 {
dossierID = os.Args[1]
}
for i, d := range dossiers {
fmt.Printf("[%d/%d] Normalizing labs for dossier %s (%d entries)...\n",
i+1, len(dossiers), d.DossierID, d.Count)
count, _ := lib.EntryCount(nil, dossierID, lib.CategoryLab, "")
fmt.Printf("Normalizing %d lab entries for %s...\n", count, dossierID)
if err := lib.Normalize(d.DossierID, lib.CategoryLab); err != nil {
log.Printf("Warning: normalize failed for %s: %v", d.DossierID, err)
continue
}
if err := lib.Normalize(dossierID, lib.CategoryLab, func(p, t int) {
fmt.Printf(" batch %d/%d\n", p, t)
}); err != nil {
log.Fatal("Normalize failed:", err)
}
fmt.Println("Done!")

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -24,6 +24,7 @@ func Init() error {
var (
GeminiKey string = ""
AnthropicKey string = ""
FireworksKey string = ""
SystemAccessorID string = "7b3a3ee1c2776dcd" // Default fallback
)
@ -49,6 +50,8 @@ func ConfigInit() {
GeminiKey = value
case "ANTHROPIC_API_KEY":
AnthropicKey = value
case "FIREWORKS_API_KEY":
FireworksKey = value
case "SYSTEM_ACCESSOR_ID":
SystemAccessorID = value
}
@ -62,6 +65,9 @@ func ConfigInit() {
if AnthropicKey == "" {
AnthropicKey = os.Getenv("ANTHROPIC_API_KEY")
}
if FireworksKey == "" {
FireworksKey = os.Getenv("FIREWORKS_API_KEY")
}
if SystemAccessorID == "" {
if envID := os.Getenv("SYSTEM_ACCESSOR_ID"); envID != "" {
SystemAccessorID = envID

View File

@ -1,6 +1,7 @@
package lib
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
@ -148,3 +149,101 @@ func CallGeminiMultimodal(parts []GeminiPart, config *GeminiConfig) (string, err
return finalText, nil
}
// CallFireworks sends a request to the Fireworks AI API (OpenAI-compatible).
// messages should be OpenAI-format: []map[string]interface{} with "role" and "content" keys.
// For vision, content can be an array of {type: "text"/"image_url", ...} parts.
// Returns the model's text with any surrounding markdown code fence stripped.
func CallFireworks(model string, messages []map[string]interface{}, maxTokens int) (string, error) {
	if FireworksKey == "" {
		return "", fmt.Errorf("Fireworks API key not configured")
	}
	// Large outputs are streamed to avoid server-side response limits.
	stream := maxTokens > 4096
	reqBody := map[string]interface{}{
		"model":       model,
		"messages":    messages,
		"max_tokens":  maxTokens,
		"temperature": 0.1,
		"stream":      stream,
	}
	jsonBody, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}
	req, err := http.NewRequest("POST", "https://api.fireworks.ai/inference/v1/chat/completions", bytes.NewReader(jsonBody))
	if err != nil {
		return "", fmt.Errorf("create request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+FireworksKey)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", fmt.Errorf("API request: %w", err)
	}
	defer resp.Body.Close()
	if !stream {
		body, err := io.ReadAll(resp.Body)
		if err != nil {
			return "", fmt.Errorf("read response: %w", err)
		}
		if resp.StatusCode != 200 {
			return "", fmt.Errorf("Fireworks API error %d: %s", resp.StatusCode, string(body))
		}
		var oaiResp struct {
			Choices []struct {
				Message struct {
					Content string `json:"content"`
				} `json:"message"`
			} `json:"choices"`
		}
		if err := json.Unmarshal(body, &oaiResp); err != nil {
			return "", fmt.Errorf("parse response: %w", err)
		}
		if len(oaiResp.Choices) == 0 {
			return "", fmt.Errorf("empty response from Fireworks")
		}
		return trimJSONFence(oaiResp.Choices[0].Message.Content), nil
	}
	// Streaming: read SSE chunks and accumulate content
	if resp.StatusCode != 200 {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("Fireworks API error %d: %s", resp.StatusCode, string(body))
	}
	var sb strings.Builder
	scanner := bufio.NewScanner(resp.Body)
	// SSE data lines can be long; raise the scanner limit from the 64KB default.
	scanner.Buffer(make([]byte, 256*1024), 256*1024)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data: ") {
			continue
		}
		data := line[6:]
		if data == "[DONE]" {
			break
		}
		var chunk struct {
			Choices []struct {
				Delta struct {
					Content string `json:"content"`
				} `json:"delta"`
			} `json:"choices"`
		}
		// Malformed chunks are skipped; only well-formed deltas are accumulated.
		if json.Unmarshal([]byte(data), &chunk) == nil && len(chunk.Choices) > 0 {
			sb.WriteString(chunk.Choices[0].Delta.Content)
		}
	}
	// A scanner error (oversized line, connection reset) would otherwise
	// silently truncate the response — surface it instead.
	if err := scanner.Err(); err != nil {
		return "", fmt.Errorf("read stream: %w", err)
	}
	return trimJSONFence(sb.String()), nil
}

// trimJSONFence strips a surrounding markdown ```json code fence that
// models commonly wrap JSON output in, returning the trimmed inner text.
func trimJSONFence(s string) string {
	s = strings.TrimSpace(s)
	s = strings.TrimPrefix(s, "```json")
	s = strings.TrimPrefix(s, "```")
	s = strings.TrimSuffix(s, "```")
	return strings.TrimSpace(s)
}

View File

@ -67,7 +67,7 @@ func Normalize(dossierID string, category int, progress ...func(processed, total
// 3. Call LLM with group keys (batched to stay within token limits)
mapping := make(map[string]normMapping)
batchSize := 100
batchSize := 50
for i := 0; i < len(keys); i += batchSize {
end := i + batchSize
if end > len(keys) {
@ -240,11 +240,13 @@ Return a JSON object where each key is the EXACT input name, value is {"name":"C
Test names:
%s`, nameList)
maxTokens := 8192
maxTokens := 32768
temp := 0.0
model := "gemini-3.1-pro-preview"
config := &GeminiConfig{
Temperature: &temp,
MaxOutputTokens: &maxTokens,
Model: &model,
}
resp, err := CallGeminiMultimodal([]GeminiPart{{Text: prompt}}, config)

View File

@ -3,13 +3,11 @@ package main
import (
"encoding/json"
"fmt"
"html/template"
"net/http"
"sort"
"strings"
"time"
"inou/lib"
"path/filepath"
"os"
)
// DossierSection represents a unified section block on the dossier page
@ -44,18 +42,19 @@ type TrackerButton struct {
// SectionItem represents a row in a section
type SectionItem struct {
ID string
Label string
Meta string // secondary text below label
Date string // YYYYMMDD format for JS formatting
Time string // "20:06 -0400" — shown alongside Date when present
Type string
Value string
LinkURL string
LinkTitle string
Expandable bool
Expanded bool
Children []SectionItem
ID string
Label string
Meta string // secondary text below label
Date string // YYYYMMDD format for JS formatting
Time string // "20:06 -0400" — shown alongside Date when present
Type string
Value string
LinkURL string
LinkTitle string
Expandable bool
Expanded bool
Children []SectionItem
SourceSpansJSON string // JSON-encoded source spans for doc pane highlighting
}
// SectionConfig defines how to build a section for a category
@ -79,7 +78,6 @@ var sectionConfigs = []SectionConfig{
{ID: "procedures", Category: lib.CategorySurgery, Color: "DC2626", HeadingKey: "section_procedures", HideEmpty: true},
{ID: "assessments", Category: lib.CategoryAssessment, Color: "7C3AED", HeadingKey: "section_assessments", HideEmpty: true},
{ID: "genetics", Category: lib.CategoryGenome, Color: "8B5CF6", HeadingKey: "section_genetics", HideEmpty: true, Dynamic: true, DynamicType: "genetics"},
{ID: "uploads", Color: "6366f1", HeadingKey: "section_uploads", HideEmpty: false},
{ID: "medications", Category: lib.CategoryMedication, Color: "8b5cf6", HeadingKey: "section_medications", HideEmpty: true},
{ID: "supplements", Category: lib.CategorySupplement, Color: "8b5cf6", HeadingKey: "section_supplements", HideEmpty: true},
{ID: "symptoms", Category: lib.CategorySymptom, Color: "F59E0B", HeadingKey: "section_symptoms", HideEmpty: true},
@ -167,23 +165,50 @@ func BuildDossierSections(targetID, targetHex string, target *lib.Dossier, p *li
}
case "labs":
section.Items, section.Summary = buildLabItems(targetID, lang, T)
section.Searchable = len(section.Items) > 5
orders, _ := lib.EntryQueryOld(targetID, lib.CategoryLab, "lab_order")
sort.Slice(orders, func(i, j int) bool { return orders[i].Timestamp > orders[j].Timestamp })
section.Searchable = true
if len(orders) == 0 {
section.Summary = T("no_lab_data")
} else {
section.Summary = fmt.Sprintf("%d orders", len(orders))
for _, order := range orders {
item := SectionItem{
ID: order.EntryID,
Label: order.Value,
Expandable: true,
}
var odata struct{ LocalTime string `json:"local_time"` }
if json.Unmarshal([]byte(order.Data), &odata) == nil && odata.LocalTime != "" {
if t, err := time.Parse(time.RFC3339, odata.LocalTime); err == nil {
item.Date = t.Format("20060102")
if t.Hour() != 0 || t.Minute() != 0 {
_, offset := t.Zone()
item.Time = fmt.Sprintf("%02d:%02d %s", t.Hour(), t.Minute(), offsetToTZName(offset))
}
}
}
if item.Date == "" && order.Timestamp > 0 {
item.Date = time.Unix(order.Timestamp, 0).Format("20060102")
}
section.Items = append(section.Items, item)
}
}
case "documents":
entries, _ := lib.EntryList(lib.SystemAccessorID, "", lib.CategoryDocument, &lib.EntryFilter{DossierID: targetID, Limit: 50})
section.Items = entriesToSectionItems(entries)
section.Summary = fmt.Sprintf("%d documents", len(entries))
section.Items = docEntriesToSectionItems(entries)
section.Summary = fmt.Sprintf("%d", len(entries))
case "procedures":
entries, _ := lib.EntryList(lib.SystemAccessorID, "", lib.CategorySurgery, &lib.EntryFilter{DossierID: targetID, Limit: 50})
section.Items = entriesToSectionItems(entries)
section.Summary = fmt.Sprintf("%d procedures", len(entries))
section.Summary = fmt.Sprintf("%d", len(entries))
case "assessments":
entries, _ := lib.EntryList(lib.SystemAccessorID, "", lib.CategoryAssessment, &lib.EntryFilter{DossierID: targetID, Limit: 50})
section.Items = entriesToSectionItems(entries)
section.Summary = fmt.Sprintf("%d assessments", len(entries))
section.Summary = fmt.Sprintf("%d", len(entries))
case "genetics":
genomeEntries, _ := lib.EntryList(lib.SystemAccessorID, "", lib.CategoryGenome, &lib.EntryFilter{DossierID: targetID, Limit: 1})
@ -194,24 +219,6 @@ func BuildDossierSections(targetID, targetHex string, target *lib.Dossier, p *li
}
// Items loaded dynamically via JS
case "uploads":
uploadDir := filepath.Join(uploadsDir, targetHex)
var uploadCount int
var uploadSize int64
filepath.Walk(uploadDir, func(path string, info os.FileInfo, err error) error {
if err == nil && !info.IsDir() { uploadCount++; uploadSize += info.Size() }
return nil
})
if uploadCount > 0 {
section.Summary = fmt.Sprintf("%d files, %s", uploadCount, formatSize(uploadSize))
} else {
section.Summary = T("no_files")
}
if canEdit {
section.ActionURL = fmt.Sprintf("/dossier/%s/upload", targetHex)
section.ActionLabel = T("manage")
}
case "vitals":
section.Summary = T("vitals_desc")
@ -224,13 +231,12 @@ func BuildDossierSections(targetID, targetHex string, target *lib.Dossier, p *li
if cfg.Category > 0 {
entries, _ := lib.EntryList(lib.SystemAccessorID, "", cfg.Category, &lib.EntryFilter{DossierID: targetID, Limit: 50})
section.Items = entriesToSectionItems(entries)
// Use section ID for summary (e.g., "2 medications" not "2 items")
section.Summary = fmt.Sprintf("%d %s", len(entries), cfg.ID)
section.Summary = fmt.Sprintf("%d", len(entries))
}
}
// Skip empty sections if configured to hide
if section.HideEmpty && len(section.Items) == 0 && !section.Dynamic && !section.ComingSoon && section.ID != "checkin" && section.ID != "uploads" {
if section.HideEmpty && len(section.Items) == 0 && !section.Dynamic && !section.ComingSoon && section.ID != "checkin" {
continue
}
@ -469,28 +475,158 @@ func buildLabItems(dossierID, lang string, T func(string) string) ([]SectionItem
return items, summary
}
// entriesToSectionItems converts Entry slice to SectionItem slice
func entriesToSectionItems(entries []*lib.Entry) []SectionItem {
// docEntriesToSectionItems converts document entries to section items with preview links.
func docEntriesToSectionItems(entries []*lib.Entry) []SectionItem {
	var items []SectionItem
	for _, entry := range entries {
		if entry == nil {
			continue
		}
		row := SectionItem{
			ID:        entry.EntryID,
			Label:     entry.Value,
			Type:      entry.Type,
			LinkURL:   entry.EntryID,
			LinkTitle: "source",
		}
		// Convert the Unix timestamp to YYYYMMDD for client-side formatting.
		if entry.Timestamp > 0 {
			row.Date = time.Unix(entry.Timestamp, 0).Format("20060102")
		}
		items = append(items, row)
	}
	return items
}
// entriesToSectionItems converts Entry slice to SectionItem slice.
// Entries with Data fields become expandable with details as children.
func entriesToSectionItems(entries []*lib.Entry) []SectionItem {
	// Internal bookkeeping keys in Data that must never be rendered as children.
	hidden := map[string]bool{
		"source_doc_id":      true,
		"source":             true,
		"source_spans":       true,
		"summary_translated": true,
	}
	var items []SectionItem
	for _, entry := range entries {
		if entry == nil {
			continue
		}
		// Doc extracts commonly leave Value empty; fall back to Summary for the label.
		title := entry.Value
		if title == "" {
			title = entry.Summary
		}
		row := SectionItem{
			ID:    entry.EntryID,
			Label: title,
			Type:  entry.Type,
		}
		if entry.Timestamp > 0 {
			row.Date = time.Unix(entry.Timestamp, 0).Format("20060102")
		}
		// Parse Data to build expandable children.
		if entry.Data != "" {
			var fields map[string]interface{}
			if json.Unmarshal([]byte(entry.Data), &fields) == nil {
				// Deterministic child order: preferred fields first, ties alphabetical.
				names := make([]string, 0, len(fields))
				for name := range fields {
					if !hidden[name] {
						names = append(names, name)
					}
				}
				sort.Slice(names, func(a, b int) bool {
					pa, pb := dataFieldPriority(names[a]), dataFieldPriority(names[b])
					if pa == pb {
						return names[a] < names[b]
					}
					return pa < pb
				})
				for _, name := range names {
					if rendered := formatDataValue(name, fields[name]); rendered != "" {
						row.Children = append(row.Children, SectionItem{Label: name, Value: rendered})
					}
				}
				row.Expandable = len(row.Children) > 0
				// Link back to the source document when one is recorded.
				if docID, ok := fields["source_doc_id"].(string); ok && docID != "" {
					row.LinkURL = docID
					row.LinkTitle = "source"
				}
				// Span metadata drives highlighting in the doc pane.
				if spans, ok := fields["source_spans"]; ok {
					if encoded, err := json.Marshal(spans); err == nil {
						row.SourceSpansJSON = string(encoded)
					}
				}
				// Translated summary is shown as secondary text.
				if translated, ok := fields["summary_translated"].(string); ok && translated != "" {
					row.Meta = translated
				}
			}
		}
		items = append(items, row)
	}
	return items
}
// dataFieldPriority returns sort priority for data field keys (lower = first).
// Unlisted keys default to 9, placing them before details/description/phone/address.
func dataFieldPriority(key string) int {
	switch key {
	case "name":
		return 1
	case "role":
		return 2
	case "specialty":
		return 3
	case "institution":
		return 4
	case "procedure", "diagnosis", "condition", "therapy":
		return 5
	case "facility", "surgeon", "provider":
		return 6
	case "date":
		return 7
	case "frequency", "duration":
		return 8
	case "details", "description", "notes":
		return 10
	case "phone":
		return 11
	case "address":
		return 12
	default:
		return 9
	}
}
// formatDataValue renders a Data field value as a display string.
// Strings pass through unchanged; JSON numbers (float64) render without a
// trailing ".0" when integral; booleans become "yes"/"no"; nested objects
// are flattened to "key: value" pairs; any other type renders empty.
func formatDataValue(key string, v interface{}) string {
	switch val := v.(type) {
	case string:
		return val
	case float64:
		// JSON numbers decode as float64; show integral values as plain ints.
		if val == float64(int(val)) {
			return fmt.Sprintf("%d", int(val))
		}
		return fmt.Sprintf("%g", val)
	case bool:
		if val {
			return "yes"
		}
		return "no"
	case map[string]interface{}:
		// Flatten nested objects (e.g. settings: {pressure: "5 cmH₂O"}).
		// Go map iteration order is randomized, so sort the keys: without
		// this, repeated renders of the same entry produce different text.
		keys := make([]string, 0, len(val))
		for k := range val {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		var parts []string
		for _, k := range keys {
			if s := formatDataValue(k, val[k]); s != "" {
				parts = append(parts, k+": "+s)
			}
		}
		return strings.Join(parts, ", ")
	default:
		return ""
	}
}
// buildLoincNameMap builds a JSON map of LOINC code → full test name
// for displaying full names in charts.
func buildLoincNameMap() string {
@ -676,6 +812,91 @@ func formatSize(bytes int64) string {
return fmt.Sprintf("%.1f MB", float64(bytes)/(1024*1024))
}
// handleDocumentView returns the markdown content of a document entry.
// GET /dossier/{dossierID}/document/{docID}
// With ?pdf=1 it instead streams the decrypted original PDF inline.
// Responses: 401 when not logged in, 403 when the viewer has no access to
// the target dossier, 404 when the entry is missing / wrong dossier /
// not a document, otherwise JSON {markdown, title, has_pdf[, markdown_translated,
// translated_to]}.
func handleDocumentView(w http.ResponseWriter, r *http.Request) {
	p := getLoggedInDossier(r)
	if p == nil {
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}
	parts := strings.Split(r.URL.Path, "/")
	// /dossier/{id}/document/{docID} → parts[2]=id, parts[4]=docID
	if len(parts) < 5 {
		http.NotFound(w, r)
		return
	}
	targetID := parts[2]
	docID := parts[4]
	// RBAC check
	if _, err := lib.DossierGet(p.DossierID, targetID); err != nil {
		http.Error(w, "Forbidden", http.StatusForbidden)
		return
	}
	// Verify the entry belongs to the requested dossier and is a document;
	// anything else is reported as not-found rather than forbidden.
	entry, err := lib.EntryGet(nil, docID)
	if err != nil || entry.DossierID != targetID || entry.Category != lib.CategoryDocument {
		http.NotFound(w, r)
		return
	}
	// Serve original PDF if ?pdf=1
	if r.URL.Query().Get("pdf") == "1" {
		var docData struct {
			SourceUpload string `json:"source_upload"`
		}
		// Best-effort parse: on failure SourceUpload stays "" and we 404 below.
		json.Unmarshal([]byte(entry.Data), &docData)
		if docData.SourceUpload == "" {
			http.Error(w, "No PDF available", http.StatusNotFound)
			return
		}
		// The document entry references the upload entry, which in turn
		// records the encrypted file's path on disk.
		uploadEntry, err := lib.EntryGet(nil, docData.SourceUpload)
		if err != nil {
			http.Error(w, "Upload not found", http.StatusNotFound)
			return
		}
		var uploadData struct {
			Path string `json:"path"`
		}
		json.Unmarshal([]byte(uploadEntry.Data), &uploadData)
		if uploadData.Path == "" {
			http.Error(w, "No file path", http.StatusNotFound)
			return
		}
		pdfBytes, err := lib.DecryptFile(uploadData.Path)
		if err != nil {
			http.Error(w, "Decrypt failed", http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/pdf")
		// NOTE(review): entry.Value is interpolated into the header unescaped;
		// if it can contain quotes or control characters this could corrupt the
		// Content-Disposition header — confirm entry.Value is sanitized upstream.
		w.Header().Set("Content-Disposition", fmt.Sprintf(`inline; filename="%s"`, entry.Value))
		w.Write(pdfBytes)
		return
	}
	var data struct {
		Markdown           string `json:"markdown"`
		MarkdownTranslated string `json:"markdown_translated"`
		TranslatedTo       string `json:"translated_to"`
		SourceUpload       string `json:"source_upload"`
	}
	// Best-effort parse: missing/invalid Data yields empty fields below.
	json.Unmarshal([]byte(entry.Data), &data)
	resp := map[string]interface{}{
		"markdown": data.Markdown,
		"title":    entry.Value,
		"has_pdf":  data.SourceUpload != "",
	}
	// Translation fields are included only when a translation exists.
	if data.MarkdownTranslated != "" {
		resp["markdown_translated"] = data.MarkdownTranslated
		resp["translated_to"] = data.TranslatedTo
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(resp)
}
// handleDossierV2 renders the new unified dossier page
func handleDossierV2(w http.ResponseWriter, r *http.Request) {
p := getLoggedInDossier(r)
@ -723,11 +944,6 @@ func handleDossierV2(w http.ResponseWriter, r *http.Request) {
// Build sections
sections := BuildDossierSections(targetID, targetHex, target, p, lang, canEdit)
// Build lab reference data for charts
labRefJSON := template.JS(buildLabRefData(targetID, target.DOB, target.Sex))
labSearchJSON := template.JS(buildLabSearchIndex())
loincNameJSON := template.JS(buildLoincNameMap())
render(w, r, PageData{
Page: "dossier",
Lang: lang,
@ -740,8 +956,197 @@ func handleDossierV2(w http.ResponseWriter, r *http.Request) {
AccessList: accessList,
HasGenome: hasGenome,
Sections: sections,
LabRefJSON: labRefJSON,
LabSearchJSON: labSearchJSON,
LoincNameJSON: loincNameJSON,
})
}
// handleLabSearch serves lab data as JSON.
// GET /dossier/{id}/labs?order={entryID} — children for one order (expand)
// GET /dossier/{id}/labs?q=sodium — search across all orders
//
// Responses:
//   order mode:  {"children": [...], "refs": {abbr: {direction, refLow, refHigh}}}
//   search mode: {"orders": [...], "refs": {...}, "loincNames": {loinc: name}}
func handleLabSearch(w http.ResponseWriter, r *http.Request) {
	// Session check; per-target authorization happens via DossierGet below.
	p := getLoggedInDossier(r)
	if p == nil { http.Error(w, "Unauthorized", http.StatusUnauthorized); return }
	// Path shape: /dossier/{id}/labs → parts[2] is the target dossier ID.
	parts := strings.Split(r.URL.Path, "/")
	if len(parts) < 3 { http.NotFound(w, r); return }
	targetID := parts[2]
	// DossierGet doubles as the access check: it errors when the caller's
	// dossier has no access to the target, which we report as Forbidden.
	target, err := lib.DossierGet(p.DossierID, targetID)
	if err != nil { http.Error(w, "Forbidden", http.StatusForbidden); return }
	// childJSON is one lab-result row sent to the client.
	type childJSON struct {
		Label string `json:"label"`
		Loinc string `json:"loinc"`
	}
	// refInfo is the reference range for one test, keyed by abbreviation
	// in the response's "refs" map.
	type refInfo struct {
		Direction string `json:"direction"`
		RefLow float64 `json:"refLow"`
		RefHigh float64 `json:"refHigh"`
	}
	// abbrInfo pairs a test's display abbreviation with its SI conversion
	// factor (used to convert stored SI values back to display units).
	type abbrInfo struct {
		abbr string
		siFactor float64
	}
	// Shared: build ref data from loinc→abbr map.
	// Reference ranges depend on the patient's sex and age in days at the
	// time of the request. Lookup failures are skipped silently (best-effort:
	// a missing range just means no band is drawn on the chart).
	buildRefs := func(loincAbbrs map[string]abbrInfo) map[string]refInfo {
		refs := make(map[string]refInfo)
		ageDays := int64(0)
		if !target.DOB.IsZero() {
			ageDays = lib.AgeDays(target.DOB.Unix(), time.Now().Unix())
		}
		sexStr := ""
		switch target.Sex {
		case 1: sexStr = "M"
		case 2: sexStr = "F"
		}
		for loinc, info := range loincAbbrs {
			test, err := lib.LabTestGet(loinc)
			if err != nil || test == nil { continue }
			ref, err := lib.LabRefLookup(loinc, sexStr, ageDays)
			if err != nil || ref == nil { continue }
			// FromLabScale presumably converts the stored fixed-point value
			// to a float; dividing by siFactor converts SI → display units.
			// TODO(review): confirm scale/unit semantics against lib.
			low := lib.FromLabScale(ref.RefLow) / info.siFactor
			high := lib.FromLabScale(ref.RefHigh) / info.siFactor
			refs[info.abbr] = refInfo{Direction: test.Direction, RefLow: low, RefHigh: high}
		}
		return refs
	}
	// Shared: extract child JSON + track loinc abbreviations.
	// Side effect: populates loincAbbrs (first abbreviation wins per LOINC)
	// so buildRefs can later resolve reference ranges for the charts.
	childToJSON := func(entries []*lib.Entry, loincAbbrs map[string]abbrInfo) []childJSON {
		var out []childJSON
		for _, c := range entries {
			var data struct {
				Loinc string `json:"loinc"`
				Abbr string `json:"abbreviation"`
				SIF float64 `json:"si_factor"`
			}
			// Best-effort decode; a malformed Data blob yields zero values.
			json.Unmarshal([]byte(c.Data), &data)
			out = append(out, childJSON{Label: c.Summary, Loinc: data.Loinc})
			if data.Loinc != "" && data.Abbr != "" {
				if _, exists := loincAbbrs[data.Loinc]; !exists {
					f := data.SIF
					if f == 0 { f = 1.0 } // missing factor → identity conversion
					loincAbbrs[data.Loinc] = abbrInfo{abbr: data.Abbr, siFactor: f}
				}
			}
		}
		return out
	}
	w.Header().Set("Content-Type", "application/json")
	// Mode 1: expand a single order — return its child results plus any
	// reference ranges we can resolve for them.
	if orderID := r.URL.Query().Get("order"); orderID != "" {
		children, _ := lib.EntryChildren(targetID, orderID)
		loincAbbrs := make(map[string]abbrInfo)
		childrenOut := childToJSON(children, loincAbbrs)
		if childrenOut == nil { childrenOut = []childJSON{} } // encode [] not null
		json.NewEncoder(w).Encode(struct {
			Children []childJSON `json:"children"`
			Refs map[string]refInfo `json:"refs"`
		}{Children: childrenOut, Refs: buildRefs(loincAbbrs)})
		return
	}
	// Mode 2: search across all orders for this dossier.
	q := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("q")))
	if len(q) < 2 {
		// Too-short query: empty result without hitting the database.
		w.Write([]byte(`{"orders":[],"refs":{}}`))
		return
	}
	// Build search index: term → []loinc, from every known lab test name
	// plus its individual words (>= 3 chars, parentheses stripped).
	// NOTE(review): this rebuilds the full index on every request; if the
	// lab_test table is large, caching it would be an easy win.
	var tests []lib.LabTest
	lib.RefQuery("SELECT loinc_id, name FROM lab_test", nil, &tests)
	searchIndex := make(map[string][]string)
	for _, test := range tests {
		name := strings.ToLower(test.Name)
		if !contains(searchIndex[name], test.LoincID) {
			searchIndex[name] = append(searchIndex[name], test.LoincID)
		}
		for _, word := range strings.Fields(name) {
			word = strings.Trim(word, "()")
			if len(word) >= 3 && !contains(searchIndex[word], test.LoincID) {
				searchIndex[word] = append(searchIndex[word], test.LoincID)
			}
		}
	}
	// Collect every LOINC whose indexed term contains the query substring.
	matchLoincs := make(map[string]bool)
	for term, loincs := range searchIndex {
		if strings.Contains(term, q) {
			for _, l := range loincs { matchLoincs[l] = true }
		}
	}
	// orderJSON is one matched lab order plus its matching child results.
	type orderJSON struct {
		ID string `json:"id"`
		Name string `json:"name"`
		Date string `json:"date"`
		Time string `json:"time"`
		Count int `json:"count"`
		Children []childJSON `json:"children"`
	}
	orders, _ := lib.EntryQueryOld(targetID, lib.CategoryLab, "lab_order")
	var matchedOrders []orderJSON
	loincAbbrs := make(map[string]abbrInfo)
	for _, order := range orders {
		children, _ := lib.EntryChildren(targetID, order.EntryID)
		// A child matches if the order name, the child summary text, or the
		// child's LOINC (via the name index) matches the query.
		orderNameMatch := strings.Contains(strings.ToLower(order.Value), q)
		var matched []childJSON
		for _, c := range children {
			var data struct {
				Loinc string `json:"loinc"`
				Abbr string `json:"abbreviation"`
				SIF float64 `json:"si_factor"`
			}
			json.Unmarshal([]byte(c.Data), &data)
			textMatch := strings.Contains(strings.ToLower(c.Summary), q)
			loincMatch := data.Loinc != "" && matchLoincs[data.Loinc]
			if orderNameMatch || textMatch || loincMatch {
				matched = append(matched, childJSON{Label: c.Summary, Loinc: data.Loinc})
				if data.Loinc != "" && data.Abbr != "" {
					if _, exists := loincAbbrs[data.Loinc]; !exists {
						f := data.SIF
						if f == 0 { f = 1.0 }
						loincAbbrs[data.Loinc] = abbrInfo{abbr: data.Abbr, siFactor: f}
					}
				}
			}
		}
		if len(matched) == 0 { continue }
		oj := orderJSON{ID: order.EntryID, Name: order.Value, Count: len(matched), Children: matched}
		// Prefer the order's recorded local time (date + wall-clock + TZ
		// label); fall back to the entry timestamp for the date alone.
		var odata struct{ LocalTime string `json:"local_time"` }
		if json.Unmarshal([]byte(order.Data), &odata) == nil && odata.LocalTime != "" {
			if t, err := time.Parse(time.RFC3339, odata.LocalTime); err == nil {
				oj.Date = t.Format("20060102")
				// Only show a time when it is not exactly midnight (which is
				// treated as "date only").
				if t.Hour() != 0 || t.Minute() != 0 {
					_, offset := t.Zone()
					oj.Time = fmt.Sprintf("%02d:%02d %s", t.Hour(), t.Minute(), offsetToTZName(offset))
				}
			}
		}
		if oj.Date == "" && order.Timestamp > 0 {
			oj.Date = time.Unix(order.Timestamp, 0).Format("20060102")
		}
		matchedOrders = append(matchedOrders, oj)
	}
	// LOINC name map: only names for the LOINCs the query matched, so the
	// client can show full test names next to abbreviations.
	loincNameMap := make(map[string]string)
	for _, t := range tests {
		if matchLoincs[t.LoincID] { loincNameMap[t.LoincID] = t.Name }
	}
	if matchedOrders == nil { matchedOrders = []orderJSON{} } // encode [] not null
	json.NewEncoder(w).Encode(struct {
		Orders []orderJSON `json:"orders"`
		Refs map[string]refInfo `json:"refs"`
		LoincNames map[string]string `json:"loincNames"`
	}{Orders: matchedOrders, Refs: buildRefs(loincAbbrs), LoincNames: loincNameMap})
}

View File

@ -77,6 +77,26 @@ section_medications: "Medications"
section_records: "Records"
section_journal: "Journal"
section_checkin: "Daily Check-in"
section_procedures: "Procedures"
section_assessments: "Assessments"
section_genetics: "Genetics"
section_supplements: "Supplements"
section_symptoms: "Symptoms"
section_hospitalizations: "Hospitalizations"
section_therapies: "Therapies"
section_consultations: "Consultations"
section_diagnoses: "Diagnoses"
section_exercise: "Exercise"
section_nutrition: "Nutrition"
section_fertility: "Fertility"
section_notes: "Notes"
section_history: "Medical History"
section_family_history: "Family History"
section_birth: "Birth"
section_devices: "Devices"
section_providers: "Providers"
section_questions: "Questions"
section_privacy: "Privacy"
# Daily Check-in
checkin_summary: "Track vitals, medications, symptoms"

View File

@ -137,9 +137,6 @@ type PageData struct {
SelectedRole string
// Dossier: unified sections
Sections []DossierSection
LabRefJSON template.JS // JSON: abbreviation → {direction, refLow, refHigh}
LabSearchJSON template.JS // JSON: search term → []loinc
LoincNameJSON template.JS // JSON: loinc → full test name
}
type CategoryAccess struct {
@ -1875,6 +1872,8 @@ func setupMux() http.Handler {
} else if strings.Contains(path, "/files/") && strings.HasSuffix(path, "/undo") { handleUndoImport(w, r)
} else if strings.HasSuffix(path, "/process-imaging") { handleProcessImaging(w, r)
} else if strings.HasSuffix(path, "/process-status") { handleProcessStatus(w, r)
} else if strings.HasSuffix(path, "/labs") { handleLabSearch(w, r)
} else if strings.Contains(path, "/document/") { handleDocumentView(w, r)
} else { handleDossierV2(w, r) }
})
mux.HandleFunc("/viewer/", func(w http.ResponseWriter, r *http.Request) {
@ -1932,6 +1931,15 @@ func main() {
fmt.Println("lib.DBInit successful")
lib.ConfigInit()
// Initialize prompts directory (shared with API for extraction prompts)
// Deployed layout: /tank/inou/bin/portal, prompts at /tank/inou/tracker_prompts/
exe, _ := os.Executable()
promptsDir := filepath.Join(filepath.Dir(exe), "..", "tracker_prompts")
if _, err := os.Stat(promptsDir); os.IsNotExist(err) {
promptsDir = "api/tracker_prompts" // Dev fallback
}
lib.InitPrompts(promptsDir)
loadTranslations()
lib.TranslateInit("lang") // also init lib translations for CategoryTranslate
loadTemplates()

View File

@ -1156,6 +1156,9 @@ a:hover {
.data-label {
font-weight: 500;
color: var(--text);
overflow-wrap: break-word;
word-break: break-word;
min-width: 0;
}
.data-meta {
@ -1194,16 +1197,35 @@ a:hover {
color: var(--text-muted);
text-align: right;
}
.search-wrap {
position: relative;
margin-left: auto;
width: 220px;
}
.search-input {
padding: 0.3rem 0.5rem;
padding: 0.3rem 1.5rem 0.3rem 0.5rem;
border: 1px solid var(--border);
border-radius: 6px;
font-size: 0.8rem;
background: var(--bg);
color: var(--text);
outline: none;
width: 220px;
margin-left: auto;
width: 100%;
box-sizing: border-box;
}
.search-clear {
position: absolute;
right: 6px;
top: 50%;
transform: translateY(-50%);
cursor: pointer;
color: var(--text-muted);
font-size: 1rem;
line-height: 1;
display: none;
}
.search-input:not(:placeholder-shown) + .search-clear {
display: block;
}
.search-input:focus {
border-color: var(--primary);
@ -1539,6 +1561,9 @@ a:hover {
.sg-card-content-sm { padding: 24px; max-width: 480px; }
.sg-profile-card { padding: 20px; min-height: 140px; display: flex; flex-direction: column; }
.sg-profile-card h3 { font-size: 1.25rem; margin-bottom: 4px; }
.card-actions { position: absolute; top: 14px; right: 14px; display: flex; gap: 4px; }
.card-actions a { color: var(--text-muted); text-decoration: none; padding: 2px 5px; font-size: 1.1rem; line-height: 1; border-radius: 4px; }
.card-actions a:hover { color: var(--accent); background: var(--accent-light); }
.sg-profile-card .card-meta { margin-bottom: 8px; }
.sg-profile-dob { font-size: 0.85rem; color: var(--text-muted); margin-bottom: 12px; }
.sg-profile-stats { display: flex; gap: 16px; font-size: 0.8rem; color: var(--text-muted); margin-bottom: 12px; }
@ -2010,3 +2035,74 @@ a:hover {
.build-profile-btn-icon {
font-size: 1rem;
}
/* Detail key labels in expanded data rows */
.detail-key {
color: var(--text-muted);
text-transform: capitalize;
min-width: 100px;
flex: 0 0 auto !important;
}
.data-row.child .data-value {
color: var(--text);
text-align: right;
}
/* Document side pane */
.doc-pane-overlay {
position: fixed;
inset: 0;
background: rgba(0,0,0,0.3);
z-index: 999;
}
.doc-pane {
position: fixed;
top: 0;
right: 0;
width: min(600px, 90vw);
height: 100vh;
background: var(--bg-card);
box-shadow: -4px 0 24px rgba(0,0,0,0.12);
z-index: 1000;
display: flex;
flex-direction: column;
}
.doc-pane-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 16px 20px;
border-bottom: 1px solid var(--border);
flex-shrink: 0;
}
.doc-pane-title {
font-weight: 600;
font-size: 1rem;
}
.doc-pane-body {
flex: 1;
overflow-y: auto;
padding: 20px;
font-size: 0.9rem;
line-height: 1.7;
}
.doc-pane-body h2, .doc-pane-body h3, .doc-pane-body h4 {
margin: 16px 0 8px;
font-weight: 600;
}
.doc-pane-body p { margin-bottom: 8px; }
.doc-pane-body li { margin-left: 20px; margin-bottom: 4px; }
.doc-pane-body hr { border: none; border-top: 1px solid var(--border); margin: 16px 0; }
.doc-pane-body table.doc-table { width: 100%; border-collapse: collapse; margin: 12px 0; font-size: 0.85rem; }
.doc-pane-body table.doc-table th, .doc-pane-body table.doc-table td { border: 1px solid var(--border); padding: 6px 10px; text-align: left; }
.doc-pane-body table.doc-table th { background: var(--bg-muted); font-weight: 600; }
.doc-pane-body strong { font-weight: 600; }
.doc-pane-tabs { display: flex; gap: 0; margin-left: 16px; }
.doc-pane-tab { background: none; border: none; padding: 4px 12px; cursor: pointer; font-size: 0.8rem; color: var(--text-muted); border-bottom: 2px solid transparent; }
.doc-pane-tab.active { color: var(--text); border-bottom-color: var(--accent); font-weight: 500; }
.doc-highlight {
background: var(--accent-light);
border-left: 3px solid var(--accent);
padding: 4px 8px;
border-radius: 2px;
}

View File

@ -6,7 +6,10 @@
<div class="profiles-grid" style="grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));">
<!-- Self dossier -->
<div class="card sg-profile-card" style="position: relative;">
<a href="/dossier/{{.Dossier.DossierID}}/edit" class="edit-link" title="{{.T.edit}}" style="position: absolute; top: 16px; right: 16px; color: var(--text-muted); text-decoration: none; padding: 4px;">✎</a>
<div class="card-actions">
<a href="/dossier/{{.Dossier.DossierID}}/upload" title="{{.T.upload_files}}">&#8682;</a>
<a href="/dossier/{{.Dossier.DossierID}}/edit" title="{{.T.edit}}">&#9998;</a>
</div>
<a href="/dossier/{{.Dossier.DossierID}}" style="text-decoration: none; color: inherit; display: contents;">
<h3>{{.Dossier.Name}}</h3>
<p class="card-meta">{{.T.you}}</p>
@ -27,12 +30,15 @@
<span class="btn btn-primary btn-small" style="margin-top: auto; align-self: flex-start;">{{.T.view}}</span>
</a>
</div>
<!-- Accessible dossiers -->
{{range .AccessibleDossiers}}
<div class="card sg-profile-card" style="position: relative;">
{{if .CanEdit}}<a href="/dossier/{{.DossierID}}/edit" class="edit-link" title="{{$.T.edit}}" style="position: absolute; top: 16px; right: 16px; color: var(--text-muted); text-decoration: none; padding: 4px;">✎</a>{{end}}
{{if eq .RelationInt 99}}<form method="POST" action="/dossier/{{.DossierID}}/revoke" style="position: absolute; top: 16px; right: 16px; margin: 0;" onsubmit="return confirm('Remove demo dossier from your list?')"><input type="hidden" name="accessor_id" value="{{$.Dossier.DossierID}}"><button type="submit" class="edit-link" title="{{$.T.remove}}" style="background: none; border: none; color: var(--text-muted); cursor: pointer; padding: 4px;">✕</button></form>{{end}}
{{if .CanEdit}}<div class="card-actions">
<a href="/dossier/{{.DossierID}}/upload" title="{{$.T.upload_files}}">&#8682;</a>
<a href="/dossier/{{.DossierID}}/edit" title="{{$.T.edit}}">&#9998;</a>
</div>{{end}}
{{if eq .RelationInt 99}}<form method="POST" action="/dossier/{{.DossierID}}/revoke" style="position: absolute; top: 16px; right: 16px; margin: 0;" onsubmit="return confirm('Remove demo dossier from your list?')"><input type="hidden" name="accessor_id" value="{{$.Dossier.DossierID}}"><button type="submit" class="edit-link" title="{{$.T.remove}}" style="background: none; border: none; color: var(--text-muted); cursor: pointer; padding: 4px;">&#10005;</button></form>{{end}}
<a href="/dossier/{{.DossierID}}" style="text-decoration: none; color: inherit; display: contents;">
<h3>{{.Name}}</h3>
<p class="card-meta">{{if eq .RelationInt 99}}{{$.T.role}}: {{.Relation}}{{else}}{{$.T.my_role}}: {{.Relation}}{{if .IsCareReceiver}} · <span class="badge badge-care">{{$.T.care}}</span>{{end}}{{end}}</p>

View File

@ -67,6 +67,21 @@
{{template "footer"}}
</div>
{{/* Document Side Pane */}}
<div id="doc-pane-overlay" class="doc-pane-overlay" style="display:none;" onclick="closeDocPane()"></div>
<div id="doc-pane" class="doc-pane" style="display:none;">
<div class="doc-pane-header">
<span class="doc-pane-title">Source Document</span>
<div id="doc-pane-tabs" class="doc-pane-tabs" style="display:none;">
<button class="doc-pane-tab active" onclick="switchDocTab('original')">Original</button>
<button class="doc-pane-tab" onclick="switchDocTab('translated')">Translated</button>
</div>
<a id="doc-pane-pdf" class="btn btn-small btn-secondary" href="#" target="_blank" style="display:none; margin-left:auto; margin-right:8px;">PDF</a>
<button class="btn-icon" onclick="closeDocPane()">&times;</button>
</div>
<div class="doc-pane-body" id="doc-pane-body"></div>
</div>
{{/* Genetics Warning Modal */}}
<div id="genetics-warning-modal" class="modal" style="display:none;">
<div class="modal-content" style="max-width: 520px;">
@ -89,18 +104,238 @@
<script>
const dossierGUID = "{{.TargetDossier.DossierID}}";
const userLang = "{{.Lang}}";
const labRefData = {{if .LabRefJSON}}{{.LabRefJSON}}{{else}}{}{{end}};
const labSearchIndex = {{if .LabSearchJSON}}{{.LabSearchJSON}}{{else}}{}{{end}};
const loincNames = {{if .LoincNameJSON}}{{.LoincNameJSON}}{{else}}{}{{end}};
let labRefData = {};
let loincNames = {};
// Section expand/collapse
// Document side pane
let docCache = {}; // docID → {html, htmlTranslated, markdown, hasPDF, hasTranslation}
let currentDocID = null;
let currentDocTab = 'original';
// Open the document side pane for a source document.
// docID: entry ID of the document to fetch.
// spansJSON: optional JSON string — an array of {start, end} verbatim text
//   snippets used for span-based highlighting.
// fallbackText: plain text used for keyword highlighting when no spans parse.
// Fetched documents are cached in docCache so reopening is instant.
async function openDocPane(docID, spansJSON, fallbackText) {
  const pane = document.getElementById('doc-pane');
  const overlay = document.getElementById('doc-pane-overlay');
  const body = document.getElementById('doc-pane-body');
  // Show pane + overlay and lock background scrolling.
  pane.style.display = '';
  overlay.style.display = '';
  document.body.style.overflow = 'hidden';
  currentDocID = docID;
  currentDocTab = 'original';
  // Reset PDF button and tabs (re-shown below once we know what's available)
  const pdfBtn = document.getElementById('doc-pane-pdf');
  if (pdfBtn) pdfBtn.style.display = 'none';
  const tabs = document.getElementById('doc-pane-tabs');
  if (tabs) tabs.style.display = 'none';
  let doc;
  if (docCache[docID]) {
    doc = docCache[docID];
  } else {
    body.innerHTML = '<p class="text-muted">Loading...</p>';
    try {
      const resp = await fetch(`/dossier/${dossierGUID}/document/${docID}`);
      if (!resp.ok) throw new Error('Not found');
      const data = await resp.json();
      // Pre-render both original and translated markdown to HTML once,
      // then cache everything needed by switchDocTab/highlighting.
      doc = {
        html: markdownToHTML(data.markdown || ''),
        markdown: data.markdown || '',
        hasPDF: data.has_pdf,
        hasTranslation: !!data.markdown_translated,
        htmlTranslated: data.markdown_translated ? markdownToHTML(data.markdown_translated) : '',
      };
      docCache[docID] = doc;
    } catch (e) {
      body.innerHTML = '<p class="text-muted">Could not load document</p>';
      return;
    }
  }
  body.innerHTML = doc.html;
  // Show PDF button if original is available
  if (doc.hasPDF && pdfBtn) {
    pdfBtn.href = `/dossier/${dossierGUID}/document/${docID}?pdf=1`;
    pdfBtn.style.display = '';
  }
  // Show translation tabs if translated version exists; mark "Original"
  // active by matching the button label text.
  if (doc.hasTranslation && tabs) {
    tabs.style.display = '';
    tabs.querySelectorAll('.doc-pane-tab').forEach(t => t.classList.toggle('active', t.textContent === 'Original'));
  }
  // Try source_spans first, fall back to keyword matching
  let spans = [];
  try { if (spansJSON) spans = JSON.parse(spansJSON); } catch(e) {}
  if (spans.length > 0) {
    highlightBySpans(body, doc.markdown, spans);
  } else if (fallbackText) {
    highlightByKeywords(body, fallbackText);
  }
}
// Switch the document pane between the 'original' and 'translated'
// renderings of the currently open (and cached) document.
function switchDocTab(tab) {
  const cached = currentDocID ? docCache[currentDocID] : null;
  if (!cached) return;
  currentDocTab = tab;
  const paneBody = document.getElementById('doc-pane-body');
  paneBody.innerHTML = (tab === 'translated') ? cached.htmlTranslated : cached.html;
  // Activate whichever tab button's label corresponds to the chosen view.
  const tabButtons = document.querySelectorAll('#doc-pane-tabs .doc-pane-tab');
  tabButtons.forEach(btn => {
    const isActive =
      (tab === 'original' && btn.textContent === 'Original') ||
      (tab === 'translated' && btn.textContent === 'Translated');
    btn.classList.toggle('active', isActive);
  });
}
// Hide the document pane and its overlay, restoring page scrolling.
function closeDocPane() {
  for (const id of ['doc-pane', 'doc-pane-overlay']) {
    document.getElementById(id).style.display = 'none';
  }
  document.body.style.overflow = '';
}
// Highlight using LLM-provided source spans (start/end verbatim text).
// For each span, walks the rendered paragraphs in order: once a paragraph
// fuzzy-matches the span's start keywords (>= 60% of words present), every
// following paragraph is highlighted until one matches the end keywords.
// A span without end text highlights only its start paragraph.
// Finally scrolls the first highlighted element into view.
function highlightBySpans(body, markdown, spans) {
  // Clear any highlights from a previous invocation.
  body.querySelectorAll('.doc-highlight').forEach(el => el.classList.remove('doc-highlight'));
  const paras = body.querySelectorAll('p, li, h2, h3, h4');
  let firstHighlight = null;
  for (const span of spans) {
    if (!span.start) continue;
    // Extract significant words (LLM may paraphrase, so match by keywords)
    const startWords = extractWords(span.start);
    const endWords = extractWords(span.end || '');
    // Score each paragraph for start/end match
    let inSpan = false;
    for (const el of paras) {
      const t = el.textContent.toLowerCase();
      if (!inSpan && fuzzyMatch(t, startWords) >= 0.6) {
        inSpan = true;
        el.classList.add('doc-highlight');
        if (!firstHighlight) firstHighlight = el;
        // Start and end matched in the same paragraph (or no end given):
        // the span is a single paragraph.
        if (endWords.length === 0 || fuzzyMatch(t, endWords) >= 0.6) { inSpan = false; continue; }
      } else if (inSpan) {
        el.classList.add('doc-highlight');
        if (endWords.length > 0 && fuzzyMatch(t, endWords) >= 0.6) inSpan = false;
      }
    }
  }
  if (firstHighlight) firstHighlight.scrollIntoView({ behavior: 'smooth', block: 'center' });
}
// Extract significant words (>= 4 chars) from text, lowercased.
// Splits on whitespace and common punctuation; returns [] for empty input.
function extractWords(text) {
  if (!text) return [];
  const tokens = text.toLowerCase().split(/[\s,.:;()\-\/]+/);
  const words = [];
  for (const tok of tokens) {
    if (tok.length >= 4) words.push(tok);
  }
  return words;
}
// Fuzzy match: fraction of keywords found in text (0.0 - 1.0).
// Empty keyword lists score 0 rather than dividing by zero.
function fuzzyMatch(text, keywords) {
  if (!keywords.length) return 0;
  const found = keywords.filter(kw => text.includes(kw)).length;
  return found / keywords.length;
}
// Fallback: highlight by keyword matching.
// Splits the entry text into keywords (>= 3 chars, minus common English and
// German stopwords), scores every rendered paragraph by how many keywords it
// contains, then highlights and scrolls to the single best-scoring one.
function highlightByKeywords(body, text) {
  // Clear highlights from a previous invocation.
  body.querySelectorAll('.doc-highlight').forEach(el => el.classList.remove('doc-highlight'));
  // Stopword list mixes English and German function words — the corpus
  // apparently contains German-language documents.
  const skip = new Set(['the','and','for','with','from','that','this','was','age','bei','und','der','die','des','den','dem','mit','von','aus','nach','zur','zum','ein','eine','eines','einer']);
  const keywords = text.split(/[\s,.:;()\-\/]+/).map(w => w.trim()).filter(w => w.length >= 3 && !skip.has(w.toLowerCase()));
  if (keywords.length === 0) return;
  const paras = body.querySelectorAll('p, li, h2, h3, h4');
  // Pick the single paragraph containing the most keywords.
  let bestEl = null, bestScore = 0;
  paras.forEach(el => {
    const t = el.textContent.toLowerCase();
    let score = 0;
    for (const kw of keywords) { if (t.includes(kw.toLowerCase())) score++; }
    if (score > bestScore) { bestScore = score; bestEl = el; }
  });
  if (bestEl && bestScore > 0) {
    bestEl.classList.add('doc-highlight');
    bestEl.scrollIntoView({ behavior: 'smooth', block: 'center' });
  }
}
// Markdown→HTML with inline formatting.
// Line-oriented mini-renderer supporting: #..#### headings (shifted down one
// level, # → h2), pipe tables (first row = header, ---|--- separator rows
// skipped), hr (---), bulleted and numbered list items, bold/italic.
// All text is HTML-escaped via esc() before inline markup is applied.
// NOTE(review): list items are emitted as bare <li> without a <ul>/<ol>
// wrapper, and blank lines become <br> — browsers tolerate both, and the
// doc-pane CSS styles <li> directly.
function markdownToHTML(md) {
  // Inline formatting: escape then apply bold/italic
  // (bold replaced first so single * only matches remaining italics).
  function inline(text) {
    let s = esc(text);
    s = s.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>');
    s = s.replace(/\*(.+?)\*/g, '<em>$1</em>');
    return s;
  }
  let inTable = false;
  let tableRows = [];
  // Emit buffered table rows as one <table>, then reset the buffer.
  function flushTable() {
    if (tableRows.length === 0) return '';
    // First row = header, skip separator row (---|---)
    let html = '<table class="doc-table"><thead><tr>';
    const headers = tableRows[0].split('|').map(c => c.trim()).filter(c => c);
    for (const h of headers) html += `<th>${inline(h)}</th>`;
    html += '</tr></thead><tbody>';
    for (let i = 1; i < tableRows.length; i++) {
      if (tableRows[i].match(/^[\s|:-]+$/)) continue; // skip separator
      const cells = tableRows[i].split('|').map(c => c.trim()).filter(c => c);
      html += '<tr>';
      for (const c of cells) html += `<td>${inline(c)}</td>`;
      html += '</tr>';
    }
    html += '</tbody></table>';
    tableRows = [];
    return html;
  }
  const lines = md.split('\n');
  let out = [];
  for (const line of lines) {
    // Table rows: buffer contiguous |-prefixed lines for flushTable.
    if (line.includes('|') && line.trim().startsWith('|')) {
      tableRows.push(line);
      continue;
    }
    // First non-table line after a table run flushes the buffered table.
    if (tableRows.length > 0) out.push(flushTable());
    if (line.match(/^---+$/)) { out.push('<hr>'); continue; }
    if (line.match(/^#### /)) { out.push(`<h5>${inline(line.slice(5))}</h5>`); continue; }
    if (line.match(/^### /)) { out.push(`<h4>${inline(line.slice(4))}</h4>`); continue; }
    if (line.match(/^## /)) { out.push(`<h3>${inline(line.slice(3))}</h3>`); continue; }
    if (line.match(/^# /)) { out.push(`<h2>${inline(line.slice(2))}</h2>`); continue; }
    if (line.match(/^[-*] /)) { out.push(`<li>${inline(line.slice(2))}</li>`); continue; }
    if (line.match(/^\d+\. /)) { out.push(`<li>${inline(line.replace(/^\d+\.\s*/, ''))}</li>`); continue; }
    if (line.trim() === '') { out.push('<br>'); continue; }
    out.push(`<p>${inline(line)}</p>`);
  }
  // Flush a table that runs to the end of the document.
  if (tableRows.length > 0) out.push(flushTable());
  return out.join('\n');
}
// Section expand/collapse (with lazy loading for labs).
// Toggles the 'expanded' state of a data-row header and the 'show' state of
// its sibling children container. For rows inside #section-labs that carry a
// data-entry-id, the children are fetched once from the labs endpoint on
// first expand and rendered in place.
//
// Fix: the block contained both the old guard (`if (children && ...) {`) and
// the new early-return guard, so 'show' was toggled twice (a no-op) and the
// braces were unbalanced. Collapsed to the single early-return form.
function toggleSection(el) {
  el.classList.toggle('expanded');
  const icon = el.querySelector('.expand-icon');
  // NOTE(review): the "expanded" glyph is an empty string in the original
  // (possibly a stripped '−' character) — preserved as-is; confirm intent.
  if (icon) icon.textContent = el.classList.contains('expanded') ? '' : '+';
  const children = el.nextElementSibling;
  // The header row must be immediately followed by its children container.
  if (!children || !children.classList.contains('section-children')) return;
  children.classList.toggle('show');
  // Lazy-load lab children on first expand
  const card = el.closest('.data-card');
  const entryID = el.dataset.entryId;
  if (card && card.id === 'section-labs' && entryID && !children.dataset.loaded && el.classList.contains('expanded')) {
    children.dataset.loaded = 'true'; // mark before fetch so we never double-load
    children.innerHTML = '<div class="data-row child"><span class="text-muted">Loading...</span></div>';
    fetch(`/dossier/${dossierGUID}/labs?order=${entryID}`)
      .then(r => r.json())
      .then(data => {
        // Merge ref data (reference ranges) for use by the charts.
        Object.assign(labRefData, data.refs || {});
        let html = '';
        for (const c of (data.children || [])) {
          html += `<div class="data-row child"${c.loinc ? ` data-loinc="${c.loinc}"` : ''}><span class="data-label">${esc(c.label)}</span></div>`;
        }
        children.innerHTML = html || '<div class="data-row child"><span class="text-muted">No results</span></div>';
      })
      .catch(() => { children.innerHTML = '<div class="data-row child"><span class="text-muted">Error loading</span></div>'; });
  }
}
@ -113,29 +348,51 @@ document.querySelectorAll('[data-date]').forEach(el => {
}
});
// Helper: Look up LOINC codes for a search term
function getMatchingLoincs(query) {
const matchLoincs = new Set();
if (!labSearchIndex || !query) return matchLoincs;
const q = query.toLowerCase();
for (const [term, loincs] of Object.entries(labSearchIndex)) {
if (term.includes(q)) {
loincs.forEach(loinc => matchLoincs.add(loinc));
}
}
return matchLoincs;
// Lab search: debounced fetch from server
let labSearchTimeout;
let labOriginalHTML = null; // saved order list for restore on clear
// Clear the section's search box (via its clear button), re-run the filter
// so the unfiltered view is restored, and return focus to the box.
function clearSearch(btn) {
  const box = btn.parentElement.querySelector('.search-input');
  box.value = '';
  filterSection(box);
  box.focus();
}
// Filter/search within a section
function filterSection(input) {
const table = input.closest('.data-card').querySelector('.data-table');
const card = input.closest('.data-card');
const sectionId = card.id.replace('section-', '');
if (sectionId === 'labs') {
clearTimeout(labSearchTimeout);
const q = input.value.trim();
const table = card.querySelector('.data-table');
if (q.length < 2) {
// Restore original order list
if (table && labOriginalHTML !== null) {
table.innerHTML = labOriginalHTML;
labOriginalHTML = null;
}
const chart = card.querySelector('.filter-chart');
if (chart) chart.style.display = 'none';
return;
}
// Save original HTML before first search
if (table && labOriginalHTML === null) {
labOriginalHTML = table.innerHTML;
}
labSearchTimeout = setTimeout(() => loadLabResults(card, q), 300);
return;
}
// Non-lab sections: client-side filtering
const table = card.querySelector('.data-table');
if (!table) return;
const q = input.value.toLowerCase().trim();
const rows = table.querySelectorAll('.data-row, .section-children');
const showMore = table.querySelector('.show-more');
if (!q) {
// Reset: restore hidden-row, collapse all
rows.forEach(row => {
if (row.classList.contains('section-children')) {
row.classList.remove('show');
@ -156,42 +413,26 @@ function filterSection(input) {
return;
}
// Look up LOINC codes for this search term
const matchLoincs = getMatchingLoincs(q);
// Hide show-more when filtering
if (showMore) showMore.style.display = 'none';
// Check each expandable/single row
table.querySelectorAll('.data-row.expandable, .data-row.single').forEach(row => {
const label = (row.querySelector('.data-label')?.textContent || '').toLowerCase();
const children = row.nextElementSibling;
let childMatch = false;
if (children && children.classList.contains('section-children')) {
children.querySelectorAll('.data-row.child').forEach(c => {
const cl = (c.querySelector('.data-label')?.textContent || '').toLowerCase();
const textMatch = cl.includes(q);
// Also check LOINC code match for lab results
const childLoinc = c.dataset.loinc;
const loincMatch = childLoinc && matchLoincs.has(childLoinc);
const matches = textMatch || loincMatch;
const matches = cl.includes(q);
if (matches) childMatch = true;
c.style.display = matches ? '' : 'none';
});
}
const labelMatch = label.includes(q);
const match = labelMatch || childMatch;
row.classList.remove('hidden-row');
row.style.display = match ? '' : 'none';
if (children && children.classList.contains('section-children')) {
children.classList.remove('hidden-row');
if (match) {
// If parent label matches, show all children
if (labelMatch && !childMatch) {
children.querySelectorAll('.data-row.child').forEach(c => c.style.display = '');
}
@ -206,11 +447,72 @@ function filterSection(input) {
}
}
});
// Auto-chart: collect numeric values from visible matching children
renderFilterChart(input.closest('.data-card'), table, q);
}
// Fetch lab search results for `query` from the server and render them into
// the section's data table, replacing its current content. Rebuilds the
// order/children DOM in the same structure the server-rendered list uses so
// toggleSection and renderFilterChart keep working on the result.
async function loadLabResults(card, query) {
  const table = card.querySelector('.data-table');
  if (!table) return;
  try {
    const resp = await fetch(`/dossier/${dossierGUID}/labs?q=${encodeURIComponent(query)}`);
    const data = await resp.json();
    // Update ref data and loinc names from server.
    // NOTE(review): this REPLACES the globals wholesale, whereas the lazy
    // expand path in toggleSection merges via Object.assign — confirm the
    // overwrite here is intended.
    labRefData = data.refs || {};
    loincNames = data.loincNames || {};
    if (!data.orders || data.orders.length === 0) {
      table.innerHTML = '<div class="data-row"><span class="text-muted">No results</span></div>';
      const chart = card.querySelector('.filter-chart');
      if (chart) chart.style.display = 'none';
      return;
    }
    // Render orders + children into DOM (same structure as server-rendered buildLabItems)
    let html = '';
    data.orders.forEach((order, i) => {
      // Expandable order row (pre-expanded, since these are search hits)
      html += `<div class="data-row expandable expanded" data-index="${i}" onclick="toggleSection(this)">
  <div class="data-row-main">
    <span class="expand-icon"></span>
    <span class="data-label">${esc(order.name)}</span>
  </div>
  <div class="data-values">
    <span class="data-value mono">${order.count} results</span>
    ${order.date ? `<span class="data-date" data-date="${order.date}"></span>` : ''}
    ${order.time ? `<span class="data-time">${esc(order.time)}</span>` : ''}
  </div>
</div>`;
      // Children (visible)
      html += `<div class="section-children show" data-index="${i}">`;
      for (const child of order.children) {
        html += `<div class="data-row child"${child.loinc ? ` data-loinc="${child.loinc}"` : ''}>
  <span class="data-label">${esc(child.label)}</span>
</div>`;
      }
      html += '</div>';
    });
    table.innerHTML = html;
    // Format dates: server sends YYYYMMDD, rendered in the user's locale.
    table.querySelectorAll('[data-date]').forEach(el => {
      const d = el.dataset.date;
      if (d && d.length === 8) {
        const date = new Date(+d.slice(0,4), +d.slice(4,6)-1, +d.slice(6,8));
        el.textContent = date.toLocaleDateString();
      }
    });
    // Build chart from the rendered DOM (scraping approach)
    renderFilterChart(card, table, query);
  } catch (e) {
    table.innerHTML = '<div class="data-row"><span class="text-muted">Error loading results</span></div>';
  }
}
// HTML-escape a string by round-tripping it through a detached element's
// textContent → innerHTML (escapes &, <, > and friends).
function esc(s) {
  const scratch = document.createElement('div');
  scratch.textContent = s;
  return scratch.innerHTML;
}
function renderFilterChart(card, table, q) {
let wrapper = card.querySelector('.filter-chart');
if (!wrapper) {
@ -223,92 +525,37 @@ function renderFilterChart(card, table, q) {
if (!q || q.length < 3) { wrapper.style.display = 'none'; return; }
// Collect data points: {name, value, date} from visible children
// Collect data points from visible children with data-loinc attributes
const series = {};
let debugInfo = { rowsFound: 0, rowsWithDate: 0, childrenProcessed: 0, standaloneProcessed: 0, regexFails: 0, pointsAdded: 0, loincMatches: 0 };
// Look up LOINC codes for this search term
const matchLoincs = getMatchingLoincs(q);
console.log('[Chart Debug] Query:', q, 'Matched LOINCs:', Array.from(matchLoincs));
// Process expandable rows (lab orders with children)
table.querySelectorAll('.data-row.expandable').forEach(row => {
debugInfo.rowsFound++;
if (row.style.display === 'none') return;
const dateStr = row.querySelector('[data-date]')?.dataset.date;
if (!dateStr || dateStr.length !== 8) {
console.log('[Chart Debug] Row missing date:', row.querySelector('.data-label')?.textContent, 'dateStr:', dateStr);
return;
}
debugInfo.rowsWithDate++;
if (!dateStr || dateStr.length !== 8) return;
const date = new Date(+dateStr.slice(0,4), +dateStr.slice(4,6)-1, +dateStr.slice(6,8));
const children = row.nextElementSibling;
if (!children || !children.classList.contains('section-children')) return;
children.querySelectorAll('.data-row.child').forEach(c => {
debugInfo.childrenProcessed++;
if (c.style.display === 'none') return;
// Match by LOINC code
const childLoinc = c.dataset.loinc;
if (childLoinc && matchLoincs.has(childLoinc)) {
debugInfo.loincMatches++;
const text = c.querySelector('.data-label')?.textContent || '';
const m = text.match(/^([^:]+):\s*([\d.]+)\s*(.*)/);
if (!m) {
console.log('[Chart Debug] Regex failed for text:', text);
debugInfo.regexFails++;
return;
}
const abbr = m[1].trim();
const val = parseFloat(m[2]);
const unit = m[3].trim();
if (isNaN(val)) return;
if (!childLoinc) return;
// Use LOINC code as key to group all results for same test
if (!series[childLoinc]) {
series[childLoinc] = { abbr, unit, points: [], loinc: childLoinc };
}
series[childLoinc].points.push({ date, val });
debugInfo.pointsAdded++;
}
});
});
// Also process standalone rows (non-expandable lab results)
table.querySelectorAll('.data-row.single').forEach(row => {
debugInfo.standaloneProcessed++;
if (row.style.display === 'none') return;
const rowLoinc = row.dataset.loinc;
if (rowLoinc && matchLoincs.has(rowLoinc)) {
const dateStr = row.querySelector('[data-date]')?.dataset.date;
if (!dateStr || dateStr.length !== 8) return;
const date = new Date(+dateStr.slice(0,4), +dateStr.slice(4,6)-1, +dateStr.slice(6,8));
const text = row.querySelector('.data-label')?.textContent || '';
const text = c.querySelector('.data-label')?.textContent || '';
const m = text.match(/^([^:]+):\s*([\d.]+)\s*(.*)/);
if (!m) {
debugInfo.regexFails++;
return;
}
if (!m) return;
const abbr = m[1].trim();
const val = parseFloat(m[2]);
const unit = m[3].trim();
if (isNaN(val)) return;
// Use LOINC code as key to group all results for same test
if (!series[rowLoinc]) {
series[rowLoinc] = { abbr, unit, points: [], loinc: rowLoinc };
if (!series[childLoinc]) {
series[childLoinc] = { abbr, unit, points: [], loinc: childLoinc };
}
series[rowLoinc].points.push({ date, val });
debugInfo.pointsAdded++;
debugInfo.loincMatches++;
}
series[childLoinc].points.push({ date, val });
});
});
console.log('[Chart Debug] Debug info:', debugInfo, 'Series:', Object.keys(series).map(k => `${k}: ${series[k].points.length} points`));
const chartable = Object.entries(series).filter(([,s]) => s.points.length >= 2);
if (chartable.length === 0) {
console.log('[Chart Debug] No chartable series (need >= 2 points)');
@ -732,7 +979,10 @@ loadGeneticsCategories();
<a href="{{.ActionURL}}" {{if eq .ID "imaging"}}target="_blank"{{end}} class="btn btn-small{{if eq .ID "checkin"}} btn-primary{{end}}">{{.ActionLabel}}</a>
{{end}}
{{if .Searchable}}
<input type="text" class="search-input" placeholder="Filter (3+ chars for chart)" oninput="filterSection(this)">
<div class="search-wrap">
<input type="text" class="search-input" placeholder="Filter (3+ chars for chart)" oninput="filterSection(this)">
<span class="search-clear" onclick="clearSearch(this)">&times;</span>
</div>
{{end}}
</div>
@ -754,23 +1004,24 @@ loadGeneticsCategories();
<div class="data-table">
{{range $i, $item := .Items}}
{{if $item.Expandable}}
<div class="data-row expandable{{if gt $i 4}} hidden-row{{end}}" data-index="{{$i}}" onclick="toggleSection(this)">
<div class="data-row expandable{{if gt $i 4}} hidden-row{{end}}" data-index="{{$i}}"{{if $item.ID}} data-entry-id="{{$item.ID}}"{{end}} onclick="toggleSection(this)">
<div class="data-row-main">
<span class="expand-icon">+</span>
<span class="data-label">{{$item.Label}}</span>
{{if $item.Meta}}<span class="data-meta">{{$item.Meta}}</span>{{end}}
</div>
<div class="data-values">
{{if $item.Value}}<span class="data-value mono">{{$item.Value}}</span>{{end}}
{{if $item.Date}}<span class="data-date" data-date="{{$item.Date}}"></span>{{end}}
{{if $item.Time}}<span class="data-time">{{$item.Time}}</span>{{end}}
{{if $item.LinkURL}}<a href="{{$item.LinkURL}}" target="_blank" class="btn-icon" onclick="event.stopPropagation()" title="{{$item.LinkTitle}}">→</a>{{end}}
{{if and $item.LinkURL (eq $item.LinkTitle "source")}}<a href="#" class="btn-icon" data-doc-id="{{$item.LinkURL}}" data-spans="{{$item.SourceSpansJSON}}" data-highlight="{{$item.Label}}" onclick="event.stopPropagation(); openDocPane(this.dataset.docId, this.dataset.spans, this.dataset.highlight); return false;" title="View source document">📄</a>{{else if $item.LinkURL}}<a href="{{$item.LinkURL}}" target="_blank" class="btn-icon" onclick="event.stopPropagation()" title="{{$item.LinkTitle}}">→</a>{{end}}
</div>
</div>
<div class="section-children{{if gt $i 4}} hidden-row{{end}}" data-index="{{$i}}">
{{range $item.Children}}
<div class="data-row child"{{if .Type}} data-loinc="{{.Type}}"{{end}}>
<span class="data-label">{{.Label}}</span>
{{if .Value}}<span class="data-value mono">{{.Value}}</span>{{end}}
<span class="data-label detail-key">{{.Label}}</span>
{{if .Value}}<span class="data-value">{{.Value}}</span>{{end}}
{{if .LinkURL}}<a href="{{.LinkURL}}" target="_blank" class="btn-icon" title="{{.LinkTitle}}">→</a>{{end}}
</div>
{{end}}
@ -786,7 +1037,7 @@ loadGeneticsCategories();
{{if $item.Type}}<span class="data-value">{{$item.Type}}</span>{{end}}
{{if $item.Date}}<span class="data-date" data-date="{{$item.Date}}"></span>{{end}}
{{if $item.Time}}<span class="data-time">{{$item.Time}}</span>{{end}}
{{if $item.LinkURL}}<a href="{{$item.LinkURL}}" target="_blank" class="btn-icon" title="{{$item.LinkTitle}}">→</a>{{end}}
{{if and $item.LinkURL (eq $item.LinkTitle "source")}}<a href="#" class="btn-icon" data-doc-id="{{$item.LinkURL}}" data-spans="{{$item.SourceSpansJSON}}" data-highlight="{{$item.Label}}" onclick="event.stopPropagation(); openDocPane(this.dataset.docId, this.dataset.spans, this.dataset.highlight); return false;" title="View source document">📄</a>{{else if $item.LinkURL}}<a href="{{$item.LinkURL}}" target="_blank" class="btn-icon" title="{{$item.LinkTitle}}">→</a>{{end}}
</div>
</div>
{{end}}

View File

@ -191,6 +191,7 @@ async function handleFiles(files) {
processLabel.style.display = 'none';
let geneticsFileId = null;
let pdfFileIds = [];
totalFileCount = files.length;
let completed = 0;
const concurrency = 10;
@ -205,6 +206,7 @@ async function handleFiles(files) {
const resp = await fetch('/dossier/' + dossierGUID + '/upload', { method: 'POST', body: form });
const data = await resp.json();
if (category === 'genetics' && data.id) geneticsFileId = data.id;
if (category === 'pdf' && data.id) pdfFileIds.push(data.id);
} catch (e) { console.error('Upload failed:', e); }
completed++;
uploadText.textContent = completed + ' / ' + files.length;
@ -232,7 +234,18 @@ async function handleFiles(files) {
return;
}
// Process all uploaded files in one batch
// PDF: poll for processing completion
if (pdfFileIds.length > 0) {
processText.textContent = 'Processing documents...';
processDetail.textContent = 'OCR + extracting data...';
processLabel.style.display = '';
await Promise.all(pdfFileIds.map(id => pollStatus(id)));
overlay.style.display = 'none';
location.reload();
return;
}
// Process all uploaded files in one batch (DICOM only)
if (category !== 'genetics') {
fetch('/dossier/' + dossierGUID + '/process-imaging', { method: 'POST' });
await pollProcessing();
@ -247,8 +260,8 @@ async function pollStatus(fileId) {
try {
const resp = await fetch('/dossier/' + dossierGUID + '/files/' + fileId + '/status');
const data = await resp.json();
if (data.status === 'completed') {
showToast('Genetics data processed successfully!', 'success');
if (data.status === 'completed' || data.status === 'processed') {
showToast('Processing complete!', 'success');
return;
} else if (data.status === 'failed') {
showToast('Processing failed: ' + data.details, 'error');

View File

@ -1,13 +1,17 @@
package main
import (
"encoding/base64"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"regexp"
"sort"
"strings"
"sync"
"time"
@ -360,6 +364,9 @@ func handleUploadPost(w http.ResponseWriter, r *http.Request) {
if category == "genetics" {
go processGenomeUpload(entryID, targetID, filePath)
}
if category == "pdf" {
go processDocumentUpload(entryID, targetID, filePath, fileName)
}
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(fmt.Sprintf(`{"status":"ok","id":"%s"}`, entryID)))
}
@ -746,3 +753,356 @@ func runProcessImaging(actorID, targetID string) {
lib.AuditLog(actorID, "json_import", targetID, fmt.Sprintf("files=%d", jsonImported))
}
}
// extractedEntry is the JSON structure returned by extraction prompts.
// One instance represents a single fact an LLM pulled out of an OCR'd
// document; entries are persisted as child lib.Entry records of the
// source document entry.
type extractedEntry struct {
	Type              string                 `json:"type"`                         // category-specific subtype, e.g. "delivery"
	Value             string                 `json:"value"`
	Summary           string                 `json:"summary"`                      // human-readable description, in the document's original language
	SummaryTranslated string                 `json:"summary_translated,omitempty"` // present only when the dossier has a target language set
	SearchKey         string                 `json:"search_key,omitempty"`         // normalized dedup key, e.g. "diagnosis:hydrocephalus" (see extractionPreamble)
	Timestamp         string                 `json:"timestamp,omitempty"`          // date string; parsed by parseTimestamp
	Data              map[string]interface{} `json:"data"`                         // category-specific structured fields
	SourceSpans       []sourceSpan           `json:"source_spans,omitempty"`       // provenance markers into the source markdown
}

// sourceSpan marks a passage in the source markdown by the verbatim first
// and last words of the passage; used to highlight provenance when viewing
// the source document.
type sourceSpan struct {
	Start string `json:"start"`
	End   string `json:"end"`
}
// extractionPreamble returns common instructions prepended to every extraction prompt.
// The base rules forbid translation and require provenance spans plus a
// normalized dedup key; when targetLang is non-empty, one extra rule is
// appended asking for a translated summary in that language.
func extractionPreamble(targetLang string) string {
	const base = `IMPORTANT RULES (apply to all entries you return):
- Do NOT translate. Keep ALL text values (summary, value, data fields) in the ORIGINAL language of the document.
- For each entry, include "source_spans": an array of {"start": "...", "end": "..."} where start/end are the VERBATIM first and last 5-8 words of the relevant passage(s) in the source markdown. This is used to highlight the source text. Multiple spans are allowed.
- For each entry, include "search_key": a short normalized deduplication key in English lowercase. Format: "thing:qualifier:YYYY-MM" or "thing:qualifier" for undated facts. Examples: "surgery:vp-shunt:2020-07", "device:ommaya-reservoir:2020-04", "diagnosis:hydrocephalus", "provider:peraud:ulm". Same real-world fact across different documents MUST produce the same key.
`
	if targetLang == "" {
		return base
	}
	return base + `- Include "summary_translated": a translation of the summary field into ` + targetLang + `.
`
}
// loadExtractionPrompts discovers all extract_*.md files in the tracker
// prompts directory and returns {categoryID: prompt content}. Files whose
// name does not map to a known category are logged and skipped; unreadable
// files are likewise logged and skipped (previously they were skipped
// silently, which could disable a whole extraction category with no trace).
func loadExtractionPrompts() map[int]string {
	pattern := filepath.Join(lib.TrackerPromptsDir(), "extract_*.md")
	// Glob only errors on a malformed pattern, which this constant is not.
	files, _ := filepath.Glob(pattern)
	prompts := make(map[int]string, len(files))
	for _, f := range files {
		// extract_device.md → "device"
		base := filepath.Base(f)
		name := strings.TrimSuffix(strings.TrimPrefix(base, "extract_"), ".md")
		catID, ok := lib.CategoryFromString[name]
		if !ok {
			log.Printf("[doc-import] Unknown category in prompt file: %s", base)
			continue
		}
		data, err := os.ReadFile(f)
		if err != nil {
			log.Printf("[doc-import] Failed to read prompt file %s: %v", base, err)
			continue
		}
		prompts[catID] = string(data)
	}
	return prompts
}
// parseTimestamp tries to parse a date string into a Unix timestamp.
// Accepted layouts: ISO (2006-01-02), German (02.01.2006), US (01/02/2006),
// and the long form "Jan 2, 2006". Dates are interpreted as midnight UTC.
// Returns 0 for an empty or unparseable string (callers treat 0 as
// "no date; fall back to now").
func parseTimestamp(s string) int64 {
	if s == "" {
		return 0
	}
	// The loop variable previously shadowed the fmt package; renamed.
	for _, layout := range []string{"2006-01-02", "02.01.2006", "01/02/2006", "Jan 2, 2006"} {
		if t, err := time.Parse(layout, s); err == nil {
			return t.Unix()
		}
	}
	return 0
}
// Fireworks model identifiers. The same multimodal model currently serves
// both the OCR (vision) calls and the text-only extraction calls; two
// constants exist so they can diverge later without touching call sites.
const (
	fireworksVisionModel = "accounts/fireworks/models/qwen3-vl-30b-a3b-instruct"
	fireworksTextModel   = "accounts/fireworks/models/qwen3-vl-30b-a3b-instruct"
)

// ocrPrompt instructs the vision model to transcribe uploaded document
// pages to markdown verbatim — no translation, no summarization — so the
// downstream extraction prompts see the document's original text.
var ocrPrompt = `You are a medical document OCR system. Produce a faithful markdown transcription of this document.
The images are sequential pages of the same document. Process them in order: page 1 first, then page 2, etc.
Rules:
- Read each page top-to-bottom, left-to-right. For multi-column layouts, transcribe the full page as a human would read it.
- Preserve ALL text, dates, values, names, addresses, and structure
- Translate nothing keep the original language
- Use markdown headers, lists, and formatting to reflect the document structure
- For tables, use markdown tables. Preserve numeric values exactly.
- Be complete do not skip or summarize anything
- Do not describe visual elements (logos, signatures) only transcribe text
- For handwritten text, transcribe as accurately as possible. Mark uncertain readings with [?]`
// processDocumentUpload is the full background import pipeline for one
// uploaded PDF: decrypt → rasterize (pdftoppm) → OCR via the vision model →
// store the markdown as a document entry → fan out per-category extraction
// prompts in parallel (plus an optional whole-document translation) →
// persist extracted entries as children of the document → record created
// entry IDs on the upload entry so the import can be undone.
//
// It runs as a goroutine spawned by handleUploadPost; progress is reported
// only via the upload entry's "status" field ("processing" / "failed" /
// "processed") and the server log.
func processDocumentUpload(uploadID, dossierID, filePath, fileName string) {
	log.Printf("[doc-import] Starting for %s (%s)", fileName, dossierID)
	// Update upload status on the upload entry. Best-effort: lookup or
	// unmarshal failures are silently ignored, the pipeline continues.
	setUploadStatus := func(status string) {
		if entry, err := lib.EntryGet(nil, uploadID); err == nil {
			var d UploadData
			json.Unmarshal([]byte(entry.Data), &d)
			d.Status = status
			data, _ := json.Marshal(d)
			entry.Data = string(data)
			lib.EntryWrite("", entry)
		}
	}
	setUploadStatus("processing")
	// 1. Decrypt PDF
	pdfBytes, err := lib.DecryptFile(filePath)
	if err != nil {
		log.Printf("[doc-import] Decrypt failed: %v", err)
		setUploadStatus("failed")
		return
	}
	// 2. Convert PDF to PNG pages via pdftoppm
	tempDir, err := os.MkdirTemp("", "doc-import-*")
	if err != nil {
		log.Printf("[doc-import] MkdirTemp failed: %v", err)
		setUploadStatus("failed")
		return
	}
	defer os.RemoveAll(tempDir)
	pdfPath := filepath.Join(tempDir, "input.pdf")
	if err := os.WriteFile(pdfPath, pdfBytes, 0644); err != nil {
		log.Printf("[doc-import] WriteFile failed: %v", err)
		setUploadStatus("failed")
		return
	}
	prefix := filepath.Join(tempDir, "page")
	// 200 DPI: balance between OCR legibility and base64 payload size.
	cmd := exec.Command("pdftoppm", "-png", "-r", "200", pdfPath, prefix)
	if out, err := cmd.CombinedOutput(); err != nil {
		log.Printf("[doc-import] pdftoppm failed: %v: %s", err, out)
		setUploadStatus("failed")
		return
	}
	// Collect page images sorted by name
	// NOTE(review): lexical sort relies on pdftoppm zero-padding page
	// numbers — confirm ordering for documents with 10+ pages.
	pageFiles, _ := filepath.Glob(prefix + "*.png")
	sort.Strings(pageFiles)
	if len(pageFiles) == 0 {
		log.Printf("[doc-import] No pages generated")
		setUploadStatus("failed")
		return
	}
	log.Printf("[doc-import] %d pages converted", len(pageFiles))
	// 3. OCR: send pages to Fireworks vision model. All pages go in one
	// request: the prompt text first, then one image_url part per page.
	content := []interface{}{
		map[string]string{"type": "text", "text": ocrPrompt},
	}
	for _, pf := range pageFiles {
		imgBytes, err := os.ReadFile(pf)
		if err != nil {
			// Skip an unreadable page rather than failing the whole import.
			continue
		}
		b64 := base64.StdEncoding.EncodeToString(imgBytes)
		content = append(content, map[string]interface{}{
			"type": "image_url",
			"image_url": map[string]string{
				"url": "data:image/png;base64," + b64,
			},
		})
	}
	messages := []map[string]interface{}{
		{"role": "user", "content": content},
	}
	markdown, err := lib.CallFireworks(fireworksVisionModel, messages, 16384)
	if err != nil {
		log.Printf("[doc-import] OCR failed: %v", err)
		setUploadStatus("failed")
		return
	}
	log.Printf("[doc-import] OCR done: %d chars markdown", len(markdown))
	// 4. Create document entry with markdown
	now := time.Now().Unix()
	docData := map[string]interface{}{
		"markdown":      markdown,
		"source_upload": uploadID,
		"pages":         len(pageFiles),
	}
	docDataJSON, _ := json.Marshal(docData)
	docEntry := &lib.Entry{
		DossierID: dossierID,
		Category:  lib.CategoryDocument,
		Type:      "pdf",
		Value:     fileName,
		Timestamp: now,
		Data:      string(docDataJSON),
	}
	lib.EntryWrite("", docEntry)
	docID := docEntry.EntryID
	log.Printf("[doc-import] Document entry created: %s", docID)
	// 5. Fan out category extraction + optional translation.
	// mu guards both results and translatedMarkdown; reads happen only
	// after wg.Wait(), so the post-wait accesses are race-free.
	type catResult struct {
		Category int
		Entries  []extractedEntry
	}
	var mu sync.Mutex
	var results []catResult
	var wg sync.WaitGroup
	// Get dossier language for translations
	var targetLang string
	if d, err := lib.DossierGet("", dossierID); err == nil && d.Preferences.Language != "" {
		targetLang = d.Preferences.Language
	}
	preamble := extractionPreamble(targetLang)
	// Translate full markdown in parallel if target language is set
	var translatedMarkdown string
	if targetLang != "" {
		wg.Add(1)
		go func() {
			defer wg.Done()
			prompt := fmt.Sprintf("Translate this medical document to %s. Preserve all markdown formatting, headers, tables, and structure exactly. Translate ALL text including headers and labels. Output ONLY the translated markdown, nothing else.\n\n%s", targetLang, markdown)
			msgs := []map[string]interface{}{
				{"role": "user", "content": prompt},
			}
			resp, err := lib.CallFireworks(fireworksTextModel, msgs, 16384)
			if err != nil {
				// Translation is best-effort; extraction proceeds without it.
				log.Printf("[doc-import] Translation failed: %v", err)
				return
			}
			mu.Lock()
			translatedMarkdown = resp
			mu.Unlock()
			log.Printf("[doc-import] Translated to %s: %d chars", targetLang, len(resp))
		}()
	}
	prompts := loadExtractionPrompts()
	log.Printf("[doc-import] Loaded %d extraction prompts (lang=%s)", len(prompts), targetLang)
	// One goroutine per category prompt; each failure is logged and dropped
	// so a single bad category cannot sink the whole import.
	for catID, promptTmpl := range prompts {
		wg.Add(1)
		go func(catID int, promptTmpl string) {
			defer wg.Done()
			prompt := preamble + "\n" + strings.ReplaceAll(promptTmpl, "{{MARKDOWN}}", markdown)
			msgs := []map[string]interface{}{
				{"role": "user", "content": prompt},
			}
			resp, err := lib.CallFireworks(fireworksTextModel, msgs, 4096)
			if err != nil {
				log.Printf("[doc-import] Category %d failed: %v", catID, err)
				return
			}
			resp = strings.TrimSpace(resp)
			if resp == "null" || resp == "" {
				return
			}
			// Parse as array of entries
			var entries []extractedEntry
			if err := json.Unmarshal([]byte(resp), &entries); err != nil {
				// Try single object
				var single extractedEntry
				if err2 := json.Unmarshal([]byte(resp), &single); err2 == nil && single.Summary != "" {
					entries = []extractedEntry{single}
				} else {
					log.Printf("[doc-import] Category %d: parse failed: %v", catID, err)
					return
				}
			}
			if len(entries) == 0 {
				return
			}
			mu.Lock()
			results = append(results, catResult{Category: catID, Entries: entries})
			mu.Unlock()
		}(catID, promptTmpl)
	}
	wg.Wait()
	// Save translated markdown to document entry if available
	if translatedMarkdown != "" {
		if docEntry, err := lib.EntryGet(nil, docID); err == nil {
			var dd map[string]interface{}
			json.Unmarshal([]byte(docEntry.Data), &dd)
			dd["markdown_translated"] = translatedMarkdown
			dd["translated_to"] = targetLang
			b, _ := json.Marshal(dd)
			docEntry.Data = string(b)
			lib.EntryWrite("", docEntry)
		}
	}
	totalEntries := 0
	for _, r := range results {
		totalEntries += len(r.Entries)
	}
	log.Printf("[doc-import] Extraction done: %d categories, %d entries", len(results), totalEntries)
	// 6. Create entries for each extracted item
	var createdIDs []string
	for _, r := range results {
		for _, e := range r.Entries {
			// Build Data JSON with source reference + extracted fields
			dataMap := map[string]interface{}{
				"source_doc_id": docID,
			}
			for k, v := range e.Data {
				dataMap[k] = v
			}
			if len(e.SourceSpans) > 0 {
				dataMap["source_spans"] = e.SourceSpans
			}
			if e.SummaryTranslated != "" {
				dataMap["summary_translated"] = e.SummaryTranslated
			}
			dataJSON, _ := json.Marshal(dataMap)
			// Prefer the extracted date; fall back to the import time.
			ts := now
			if parsed := parseTimestamp(e.Timestamp); parsed > 0 {
				ts = parsed
			}
			entry := &lib.Entry{
				DossierID: dossierID,
				ParentID:  docID,
				Category:  r.Category,
				Type:      e.Type,
				Value:     e.Value,
				Summary:   e.Summary,
				SearchKey: e.SearchKey,
				Timestamp: ts,
				Data:      string(dataJSON),
			}
			lib.EntryWrite("", entry)
			createdIDs = append(createdIDs, entry.EntryID)
		}
	}
	// 7. Update upload status with created entry IDs (for undo)
	if entry, err := lib.EntryGet(nil, uploadID); err == nil {
		var data map[string]interface{}
		json.Unmarshal([]byte(entry.Data), &data)
		data["status"] = "processed"
		data["created_entries"] = append([]string{docID}, createdIDs...)
		b, _ := json.Marshal(data)
		entry.Data = string(b)
		lib.EntryWrite("", entry)
	}
	log.Printf("[doc-import] Complete: %s → doc=%s, %d extracts", fileName, docID, len(createdIDs))
	lib.AuditLog("", "doc_import", dossierID, fmt.Sprintf("file=%s doc=%s categories=%d", fileName, docID, len(results)))
}

52
tools/fix-lang/main.go Normal file
View File

@ -0,0 +1,52 @@
package main
import (
"encoding/base64"
"fmt"
"log"
"os"
"inou/lib"
)
// main is a one-off maintenance CLI with two modes:
//
//	fix-lang <dossierID> <lang>      — set the dossier's preferred language
//	fix-lang <dossierID> fix-data    — print a re-encrypted payload for manual SQL repair
//
// The fix-data mode does not touch the database; it only prints the packed
// blob (base64 preview + full hex) so an operator can apply it by hand.
func main() {
	if len(os.Args) < 3 {
		// The previous message described the fix-data mode with the wrong
		// invocation syntax; the mode keyword goes in the <lang> position.
		fmt.Println("Usage: fix-lang <dossierID> <lang>")
		fmt.Println("       fix-lang <dossierID> fix-data  - re-encrypt plaintext Data")
		os.Exit(1)
	}
	dossierID := os.Args[1]
	lang := os.Args[2]
	if err := lib.Init(); err != nil {
		log.Fatal("lib.Init:", err)
	}
	lib.ConfigInit()
	if lang == "fix-data" {
		// Re-encrypt plaintext Data by re-packing a known JSON payload.
		// NOTE(review): the payload is hard-coded sample data — confirm it
		// is intentionally fixed before running against production.
		plainJSON := `{"dob":"2020-02-26","sex":2,"lang":"en"}`
		packed := lib.Pack([]byte(plainJSON))
		encoded := base64.StdEncoding.EncodeToString(packed)
		fmt.Printf("Packed Data (%d bytes): %s\n", len(encoded), encoded[:40]+"...")
		// Write via raw SQL won't work without db access — need EntryWrite.
		// Instead, use lib.Save directly.
		fmt.Println("Use this to update: UPDATE entries SET Data = X'...' WHERE EntryID = ...")
		// %X prints uppercase hex, two digits per byte — same output as the
		// previous per-byte %02X loop, in one call.
		fmt.Printf("Hex: %X\n", packed)
		return
	}
	d, err := lib.DossierGet("", dossierID)
	if err != nil {
		log.Fatal("DossierGet:", err)
	}
	fmt.Printf("Dossier: %s (%s), current lang: %q\n", d.Name, d.DossierID, d.Preferences.Language)
	d.Preferences.Language = lang
	if err := lib.DossierWrite(d.DossierID, d); err != nil {
		log.Fatal("DossierWrite:", err)
	}
	fmt.Printf("Set lang to %q\n", lang)
}

View File

@ -0,0 +1,282 @@
package main
import (
"encoding/base64"
"encoding/json"
"fmt"
"inou/lib"
"log"
"os"
"os/exec"
"path/filepath"
"sort"
"strings"
"sync"
"time"
)
// extractedEntry mirrors the JSON structure returned by the extraction
// prompts: one fact pulled out of an OCR'd document. Duplicated from the
// portal's upload handler so this test tool builds standalone.
type extractedEntry struct {
	Type              string                 `json:"type"`                         // category-specific subtype
	Value             string                 `json:"value"`
	Summary           string                 `json:"summary"`                      // description in the document's original language
	SummaryTranslated string                 `json:"summary_translated,omitempty"` // optional translation
	SearchKey         string                 `json:"search_key,omitempty"`         // normalized dedup key
	Timestamp         string                 `json:"timestamp,omitempty"`          // date string, parsed during entry creation
	Data              map[string]interface{} `json:"data"`                         // category-specific structured fields
	SourceSpans       []sourceSpan           `json:"source_spans,omitempty"`       // provenance markers into the source markdown
}

// sourceSpan marks a passage in the source markdown by its verbatim first
// and last words, for provenance highlighting.
type sourceSpan struct {
	Start string `json:"start"`
	End   string `json:"end"`
}
// extractionPreamble holds the common rules prepended to every extraction
// prompt: keep the original language, emit provenance spans, and emit a
// normalized dedup key. Unlike the portal version, this tool's preamble is
// a fixed string with no per-dossier translation rule.
var extractionPreamble = `IMPORTANT RULES (apply to all entries you return):
- Do NOT translate. Keep ALL text values (summary, value, data fields) in the ORIGINAL language of the document.
- For each entry, include "source_spans": an array of {"start": "...", "end": "..."} where start/end are the VERBATIM first and last 5-8 words of the relevant passage(s) in the source markdown. This is used to highlight the source text. Multiple spans are allowed.
- For each entry, include "search_key": a short normalized deduplication key in English lowercase. Format: "thing:qualifier:YYYY-MM" or "thing:qualifier" for undated facts. Examples: "surgery:vp-shunt:2020-07", "device:ommaya-reservoir:2020-04", "diagnosis:hydrocephalus", "provider:peraud:ulm". Same real-world fact across different documents MUST produce the same key.
`
// loadExtractionPrompts discovers all extract_*.md files and returns
// {categoryID: prompt content}. Files with an unknown category name or a
// read error are reported and skipped (read errors were previously
// swallowed silently, hiding a disabled extraction category).
func loadExtractionPrompts() map[int]string {
	pattern := filepath.Join(lib.TrackerPromptsDir(), "extract_*.md")
	// Glob only errors on a malformed pattern, which this constant is not.
	files, _ := filepath.Glob(pattern)
	prompts := make(map[int]string, len(files))
	for _, f := range files {
		// extract_device.md → "device"
		base := filepath.Base(f)
		name := strings.TrimSuffix(strings.TrimPrefix(base, "extract_"), ".md")
		catID, ok := lib.CategoryFromString[name]
		if !ok {
			fmt.Printf("Unknown category in prompt file: %s\n", base)
			continue
		}
		data, err := os.ReadFile(f)
		if err != nil {
			fmt.Printf("Failed to read prompt file %s: %v\n", base, err)
			continue
		}
		prompts[catID] = string(data)
	}
	return prompts
}
// Fireworks model identifiers; the same multimodal model currently serves
// both vision (OCR) and text-only (extraction) calls.
const (
	visionModel = "accounts/fireworks/models/qwen3-vl-30b-a3b-instruct"
	textModel   = "accounts/fireworks/models/qwen3-vl-30b-a3b-instruct"
)

// ocrPrompt instructs the vision model to transcribe document pages to
// markdown verbatim — no translation, no summarization.
// NOTE(review): this copy lacks the multi-column reading-order rule present
// in the portal's ocrPrompt — confirm whether the two should match.
var ocrPrompt = `You are a medical document OCR system. Produce a faithful markdown transcription of this document.
The images are sequential pages of the same document. Process them in order: page 1 first, then page 2, etc.
Rules:
- Read each page top-to-bottom, left-to-right
- Preserve ALL text, dates, values, names, addresses, and structure
- Translate nothing keep the original language
- Use markdown headers, lists, and formatting to reflect the document structure
- For tables, use markdown tables. Preserve numeric values exactly.
- Be complete do not skip or summarize anything
- Do not describe visual elements (logos, signatures) only transcribe text
- For handwritten text, transcribe as accurately as possible. Mark uncertain readings with [?]`
// main runs the document-import pipeline end to end against a local PDF,
// bypassing the portal's upload flow: rasterize → OCR → create a document
// entry → parallel per-category extraction → persist entries → print a
// summary. Intended for manual testing; it writes real entries into the
// given dossier.
func main() {
	if len(os.Args) < 3 {
		fmt.Fprintf(os.Stderr, "Usage: test-doc-import <dossierID> <pdf-path>\n")
		os.Exit(1)
	}
	dossierID := os.Args[1]
	pdfPath := os.Args[2]
	fileName := filepath.Base(pdfPath)
	if err := lib.Init(); err != nil {
		log.Fatalf("lib.Init: %v", err)
	}
	lib.ConfigInit()
	lib.InitPrompts("tracker_prompts")
	fmt.Printf("Prompts dir: %s\n", lib.TrackerPromptsDir())
	// 1. Convert PDF to PNG pages (200 DPI, same as the portal pipeline)
	tempDir, _ := os.MkdirTemp("", "doc-import-*")
	defer os.RemoveAll(tempDir)
	prefix := filepath.Join(tempDir, "page")
	cmd := exec.Command("pdftoppm", "-png", "-r", "200", pdfPath, prefix)
	if out, err := cmd.CombinedOutput(); err != nil {
		log.Fatalf("pdftoppm: %v: %s", err, out)
	}
	pageFiles, _ := filepath.Glob(prefix + "*.png")
	sort.Strings(pageFiles)
	fmt.Printf("%d pages converted\n", len(pageFiles))
	// 2. OCR: prompt text first, then one image part per page
	content := []interface{}{
		map[string]string{"type": "text", "text": ocrPrompt},
	}
	for _, pf := range pageFiles {
		imgBytes, _ := os.ReadFile(pf)
		b64 := base64.StdEncoding.EncodeToString(imgBytes)
		content = append(content, map[string]interface{}{
			"type": "image_url",
			"image_url": map[string]string{
				"url": "data:image/png;base64," + b64,
			},
		})
	}
	fmt.Printf("Calling OCR...\n")
	start := time.Now()
	markdown, err := lib.CallFireworks(visionModel, []map[string]interface{}{
		{"role": "user", "content": content},
	}, 16384)
	if err != nil {
		log.Fatalf("OCR: %v", err)
	}
	fmt.Printf("OCR done: %d chars in %.1fs\n", len(markdown), time.Since(start).Seconds())
	// 3. Create document entry holding the raw markdown
	now := time.Now().Unix()
	docData := map[string]interface{}{
		"markdown": markdown,
		"pages":    len(pageFiles),
	}
	docDataJSON, _ := json.Marshal(docData)
	docEntry := &lib.Entry{
		DossierID: dossierID,
		Category:  lib.CategoryDocument,
		Type:      "pdf",
		Value:     fileName,
		Timestamp: now,
		Data:      string(docDataJSON),
	}
	lib.EntryWrite("", docEntry)
	docID := docEntry.EntryID
	fmt.Printf("Document entry: %s\n", docID)
	// 4. Fan out extraction: one goroutine per category prompt.
	// mu guards results; reads happen only after wg.Wait().
	type catResult struct {
		Category int
		Entries  []extractedEntry
	}
	var mu sync.Mutex
	var results []catResult
	var wg sync.WaitGroup
	prompts := loadExtractionPrompts()
	fmt.Printf("Starting %d extraction calls...\n", len(prompts))
	extractStart := time.Now()
	for catID, promptTmpl := range prompts {
		wg.Add(1)
		go func(catID int, promptTmpl string) {
			defer wg.Done()
			catName := lib.CategoryName(catID)
			prompt := extractionPreamble + "\n" + strings.ReplaceAll(promptTmpl, "{{MARKDOWN}}", markdown)
			msgs := []map[string]interface{}{
				{"role": "user", "content": prompt},
			}
			resp, err := lib.CallFireworks(textModel, msgs, 4096)
			if err != nil {
				fmt.Printf(" [%s] API error: %v\n", catName, err)
				return
			}
			resp = strings.TrimSpace(resp)
			if resp == "null" || resp == "" {
				fmt.Printf(" [%s] → null\n", catName)
				return
			}
			// Expect a JSON array; fall back to a single object.
			var entries []extractedEntry
			if err := json.Unmarshal([]byte(resp), &entries); err != nil {
				var single extractedEntry
				if err2 := json.Unmarshal([]byte(resp), &single); err2 == nil && single.Summary != "" {
					entries = []extractedEntry{single}
				} else {
					fmt.Printf(" [%s] → parse error: %v\n Response: %s\n", catName, err, resp[:min(200, len(resp))])
					return
				}
			}
			if len(entries) == 0 {
				fmt.Printf(" [%s] → empty array\n", catName)
				return
			}
			fmt.Printf(" [%s] → %d entries\n", catName, len(entries))
			mu.Lock()
			results = append(results, catResult{Category: catID, Entries: entries})
			mu.Unlock()
		}(catID, promptTmpl)
	}
	wg.Wait()
	fmt.Printf("Extraction done in %.1fs: %d categories\n", time.Since(extractStart).Seconds(), len(results))
	// 5. Create entries under the document
	var totalEntries int
	for _, r := range results {
		for _, e := range r.Entries {
			dataMap := map[string]interface{}{"source_doc_id": docID}
			for k, v := range e.Data {
				dataMap[k] = v
			}
			if len(e.SourceSpans) > 0 {
				dataMap["source_spans"] = e.SourceSpans
			}
			if e.SummaryTranslated != "" {
				dataMap["summary_translated"] = e.SummaryTranslated
			}
			dataJSON, _ := json.Marshal(dataMap)
			// Prefer the extracted date; fall back to the import time.
			// NOTE(review): this inline parse lacks the "Jan 2, 2006" layout
			// the portal's parseTimestamp accepts — confirm intentional.
			ts := now
			if e.Timestamp != "" {
				for _, layout := range []string{"2006-01-02", "02.01.2006", "01/02/2006"} {
					if t, err := time.Parse(layout, e.Timestamp); err == nil {
						ts = t.Unix()
						break
					}
				}
			}
			entry := &lib.Entry{
				DossierID: dossierID,
				ParentID:  docID,
				Category:  r.Category,
				Type:      e.Type,
				Value:     e.Value,
				Summary:   e.Summary,
				SearchKey: e.SearchKey,
				Timestamp: ts,
				Data:      string(dataJSON),
			}
			lib.EntryWrite("", entry)
			totalEntries++
		}
	}
	fmt.Printf("Created %d entries under doc %s\n", totalEntries, docID)
	// 6. Show results summary
	fmt.Println("\n=== Results ===")
	for _, r := range results {
		catName := lib.CategoryName(r.Category)
		for _, e := range r.Entries {
			spans := ""
			if len(e.SourceSpans) > 0 {
				spans = fmt.Sprintf(" spans=%d", len(e.SourceSpans))
			}
			trans := ""
			if e.SummaryTranslated != "" {
				trans = fmt.Sprintf(" [%s]", e.SummaryTranslated)
			}
			fmt.Printf(" [%s] Type=%s Summary=%s%s%s\n", catName, e.Type, e.Summary, trans, spans)
		}
	}
}
// min returns the smaller of two ints.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}