chore: commit accumulated WIP (Mar 23)
Modified: - api/api_contact_sheet.go, api/api_image.go — image/contact sheet cleanup - cmd/import-lab/main.go — minor fix - docs/schema-auth.sql — auth schema updates - lib/normalize.go, lib/stubs.go — normalization refactor - portal/defense.go — new defense middleware - portal/dossier_sections.go — section updates - portal/genome.go — genome fix - portal/main.go — main portal updates - portal/mcp_http.go, portal/mcp_tools.go — MCP refactor - portal/static/viewer.js — viewer fix - marketing/twitter/header-FINAL.png — updated asset New files: - portal/oauth_chatgpt.go — ChatGPT OAuth integration - lib/loinc.go — LOINC code lookup - lib/render.go — rendering engine - lib/Sora-Regular.ttf, lib/Sora-SemiBold.ttf — fonts - docs/chatgpt-actions-setup.md, docs/openapi.yaml — docs - portal/static/claditor-logo.css — styling - web/static/genetics.html — genetics page - tools/loinc-lookup/ — LOINC lookup tool - marketing/screens/ — screenshots - import-renpho/import-renpho — renpho binary
This commit is contained in:
parent
bf57e28e71
commit
ade93669d3
|
|
@ -1,63 +1,13 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
_ "embed"
|
|
||||||
"encoding/json"
|
|
||||||
"image"
|
|
||||||
"image/color"
|
|
||||||
"image/draw"
|
|
||||||
"image/png"
|
|
||||||
"log"
|
|
||||||
"math"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/chai2010/webp"
|
|
||||||
"golang.org/x/image/font"
|
|
||||||
"golang.org/x/image/font/opentype"
|
|
||||||
"golang.org/x/image/math/fixed"
|
|
||||||
"inou/lib"
|
"inou/lib"
|
||||||
|
|
||||||
xdraw "golang.org/x/image/draw"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:embed Sora-Regular.ttf
|
|
||||||
var soraRegularData []byte
|
|
||||||
|
|
||||||
//go:embed Sora-SemiBold.ttf
|
|
||||||
var soraSemiBoldData []byte
|
|
||||||
|
|
||||||
var (
|
|
||||||
soraFace14 font.Face
|
|
||||||
soraFace12 font.Face
|
|
||||||
soraBoldFace14 font.Face
|
|
||||||
soraBoldFace20 font.Face
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
regular, err := opentype.Parse(soraRegularData)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Failed to parse Sora Regular: %v", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
semibold, err := opentype.Parse(soraSemiBoldData)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("Failed to parse Sora SemiBold: %v", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
soraFace14, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 14, DPI: 72})
|
|
||||||
soraFace12, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 12, DPI: 72})
|
|
||||||
soraBoldFace14, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 14, DPI: 72})
|
|
||||||
soraBoldFace20, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 20, DPI: 72})
|
|
||||||
}
|
|
||||||
|
|
||||||
const thumbSize = 128
|
|
||||||
const headerHeight = 58
|
|
||||||
const cols = 12
|
|
||||||
const padding = 2 // separation between contacts
|
|
||||||
|
|
||||||
func handleContactSheet(w http.ResponseWriter, r *http.Request) {
|
func handleContactSheet(w http.ResponseWriter, r *http.Request) {
|
||||||
seriesHex := strings.TrimPrefix(r.URL.Path, "/contact-sheet.webp/")
|
seriesHex := strings.TrimPrefix(r.URL.Path, "/contact-sheet.webp/")
|
||||||
if seriesHex == "" || len(seriesHex) != 16 {
|
if seriesHex == "" || len(seriesHex) != 16 {
|
||||||
|
|
@ -65,359 +15,27 @@ func handleContactSheet(w http.ResponseWriter, r *http.Request) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get access context
|
|
||||||
ctx := getAccessContextOrFail(w, r)
|
ctx := getAccessContextOrFail(w, r)
|
||||||
if ctx == nil {
|
if ctx == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse optional window/level overrides
|
|
||||||
q := r.URL.Query()
|
q := r.URL.Query()
|
||||||
var wcOverride, wwOverride float64
|
var wc, ww float64
|
||||||
var hasWLOverride bool
|
if v := q.Get("wc"); v != "" {
|
||||||
if wc := q.Get("wc"); wc != "" {
|
wc, _ = strconv.ParseFloat(v, 64)
|
||||||
wcOverride, _ = strconv.ParseFloat(wc, 64)
|
|
||||||
hasWLOverride = true
|
|
||||||
}
|
}
|
||||||
if ww := q.Get("ww"); ww != "" {
|
if v := q.Get("ww"); v != "" {
|
||||||
wwOverride, _ = strconv.ParseFloat(ww, 64)
|
ww, _ = strconv.ParseFloat(v, 64)
|
||||||
hasWLOverride = true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
seriesID := seriesHex
|
body, err := lib.RenderContactSheet(ctx.AccessorID, seriesHex, wc, ww)
|
||||||
|
|
||||||
// Look up series entry (RBAC already checked by portal)
|
|
||||||
series, err := lib.EntryGet(ctx, seriesID)
|
|
||||||
if err != nil {
|
|
||||||
http.Error(w, "Series not found", http.StatusNotFound)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
dossierID := series.DossierID
|
|
||||||
seriesDesc := series.Tags
|
|
||||||
|
|
||||||
// Look up study entry (parent of series)
|
|
||||||
study, err := lib.EntryGet(nil, series.ParentID)
|
|
||||||
if err != nil {
|
|
||||||
http.Error(w, "Study not found", http.StatusNotFound)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
var studyData struct {
|
|
||||||
StudyDate string `json:"study_date"`
|
|
||||||
StudyDesc string `json:"study_description"`
|
|
||||||
}
|
|
||||||
json.Unmarshal([]byte(study.Data), &studyData)
|
|
||||||
|
|
||||||
// Look up dossier for patient name
|
|
||||||
dossier, _ := lib.DossierGet("", dossierID)
|
|
||||||
patientName := ""
|
|
||||||
if dossier != nil {
|
|
||||||
patientName = dossier.Name
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get all slices for this series
|
|
||||||
entries, err := lib.EntryChildrenByType(dossierID, seriesID, "slice")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(entries) == 0 {
|
|
||||||
http.Error(w, "No slices found", http.StatusNotFound)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get slice thickness for step calculation
|
|
||||||
var firstSliceData struct {
|
|
||||||
SliceThickness float64 `json:"slice_thickness"`
|
|
||||||
}
|
|
||||||
json.Unmarshal([]byte(entries[0].Data), &firstSliceData)
|
|
||||||
step := calculateStepSize(5.0, firstSliceData.SliceThickness)
|
|
||||||
|
|
||||||
// Load and resize selected slices (skip based on 5mm spacing)
|
|
||||||
type thumbInfo struct {
|
|
||||||
img image.Image
|
|
||||||
sliceNum int
|
|
||||||
pos float64
|
|
||||||
}
|
|
||||||
var thumbs []thumbInfo
|
|
||||||
var usedWC, usedWW float64 // track what window settings were actually used
|
|
||||||
for i, e := range entries {
|
|
||||||
// Same logic as dicom-import: every Nth slice starting at 1
|
|
||||||
if (i+1)%step != 1 && step != 1 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
thumb, pos, wc, ww := loadSliceThumbWithPos(ctx, e, wcOverride, wwOverride, hasWLOverride)
|
|
||||||
if thumb != nil {
|
|
||||||
thumbs = append(thumbs, thumbInfo{img: thumb, sliceNum: i + 1, pos: pos})
|
|
||||||
if usedWC == 0 && usedWW == 0 {
|
|
||||||
usedWC, usedWW = wc, ww // capture from first slice
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(thumbs) == 0 {
|
|
||||||
http.Error(w, "Could not load any images", http.StatusInternalServerError)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate grid
|
|
||||||
gridCols := cols
|
|
||||||
if len(thumbs) < gridCols {
|
|
||||||
gridCols = len(thumbs)
|
|
||||||
}
|
|
||||||
gridRows := (len(thumbs) + gridCols - 1) / gridCols
|
|
||||||
|
|
||||||
outWidth := cols*thumbSize + (cols-1)*padding
|
|
||||||
outHeight := headerHeight + gridRows*thumbSize + (gridRows-1)*padding
|
|
||||||
|
|
||||||
out := image.NewRGBA(image.Rect(0, 0, outWidth, outHeight))
|
|
||||||
// Fill with dark grey for grid lines (contrast between contacts)
|
|
||||||
draw.Draw(out, out.Bounds(), &image.Uniform{color.RGBA{80, 80, 80, 255}}, image.Point{}, draw.Src)
|
|
||||||
|
|
||||||
// Draw header
|
|
||||||
drawHeader(out, patientName, studyData.StudyDesc, seriesDesc, studyData.StudyDate, len(entries), firstSliceData.SliceThickness, step, usedWC, usedWW)
|
|
||||||
|
|
||||||
// Draw thumbnails
|
|
||||||
for i, t := range thumbs {
|
|
||||||
col := i % cols
|
|
||||||
row := i / cols
|
|
||||||
x := col * (thumbSize + padding)
|
|
||||||
y := headerHeight + row*(thumbSize+padding)
|
|
||||||
|
|
||||||
draw.Draw(out, image.Rect(x, y, x+thumbSize, y+thumbSize), t.img, image.Point{}, draw.Src)
|
|
||||||
drawNumber(out, x+2, y+2, t.sliceNum)
|
|
||||||
drawPosition(out, x+thumbSize-2, y+2, t.pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fill unused grid cells with black
|
|
||||||
for i := len(thumbs); i < gridRows*cols; i++ {
|
|
||||||
col := i % cols
|
|
||||||
row := i / cols
|
|
||||||
x := col * (thumbSize + padding)
|
|
||||||
y := headerHeight + row*(thumbSize+padding)
|
|
||||||
draw.Draw(out, image.Rect(x, y, x+thumbSize, y+thumbSize), &image.Uniform{color.Black}, image.Point{}, draw.Src)
|
|
||||||
}
|
|
||||||
|
|
||||||
w.Header().Set("Content-Type", "image/webp")
|
w.Header().Set("Content-Type", "image/webp")
|
||||||
w.Header().Set("Cache-Control", "public, max-age=3600")
|
w.Header().Set("Cache-Control", "public, max-age=3600")
|
||||||
webp.Encode(w, out, &webp.Options{Quality: 10})
|
w.Write(body)
|
||||||
}
|
|
||||||
|
|
||||||
func calculateStepSize(requestedSpacingMM, sliceThicknessMM float64) int {
|
|
||||||
if sliceThicknessMM <= 0 {
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
step := int(math.Round(requestedSpacingMM / sliceThicknessMM))
|
|
||||||
if step < 1 {
|
|
||||||
step = 1
|
|
||||||
}
|
|
||||||
return step
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawHeader(img *image.RGBA, patient, study, series, date string, totalSlices int, sliceThickness float64, step int, wc, ww float64) {
|
|
||||||
// Format date if in YYYYMMDD format
|
|
||||||
if len(date) == 8 {
|
|
||||||
date = date[0:4] + "-" + date[4:6] + "-" + date[6:8]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Draw header background
|
|
||||||
draw.Draw(img, image.Rect(0, 0, img.Bounds().Dx(), headerHeight),
|
|
||||||
&image.Uniform{color.RGBA{32, 32, 32, 255}}, image.Point{}, draw.Src)
|
|
||||||
|
|
||||||
// Line 1: Big red warning
|
|
||||||
warning := "!! NAVIGATION ONLY - USE fetch_image FOR DIAGNOSIS !!"
|
|
||||||
drawStringBold20(img, 10, 22, warning, color.RGBA{255, 50, 50, 255})
|
|
||||||
|
|
||||||
// Line 2: Patient (white) | Series | Date | Slices | ST | WC/WW
|
|
||||||
stInfo := strconv.FormatFloat(sliceThickness, 'f', 1, 64) + "mm"
|
|
||||||
if step > 1 {
|
|
||||||
stInfo += " (every " + strconv.Itoa(step) + ")"
|
|
||||||
}
|
|
||||||
wlInfo := "WC:" + strconv.FormatFloat(wc, 'f', 0, 64) + " WW:" + strconv.FormatFloat(ww, 'f', 0, 64)
|
|
||||||
|
|
||||||
// Line 2: Patient (white) | Series | Date | Slices | ST | WC/WW
|
|
||||||
drawStringBold(img, 10, 38, patient, color.RGBA{255, 255, 255, 255})
|
|
||||||
patientWidth := measureStringBold(patient)
|
|
||||||
rest := " | " + series + " | " + date + " | " + strconv.Itoa(totalSlices) + " slices | ST " + stInfo + " | " + wlInfo
|
|
||||||
drawString(img, 10+patientWidth, 38, rest, color.RGBA{200, 200, 200, 255})
|
|
||||||
|
|
||||||
// Line 3: Instructions and legend
|
|
||||||
instructions := "MCP: fetch_image(slice_id) | API: GET /image/{slice_id} | Top-left: slice# Top-right: position(mm)"
|
|
||||||
drawString(img, 10, 54, instructions, color.RGBA{255, 255, 255, 255})
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawPosition(img *image.RGBA, x, y int, pos float64) {
|
|
||||||
s := strconv.FormatFloat(pos, 'f', 1, 64) + "mm"
|
|
||||||
textWidth := measureString(s, 12) + 6
|
|
||||||
|
|
||||||
// Draw background (right-aligned)
|
|
||||||
for dy := 0; dy < 16; dy++ {
|
|
||||||
for dx := 0; dx < textWidth; dx++ {
|
|
||||||
img.SetRGBA(x-textWidth+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
drawStringSize(img, x-textWidth+3, y+12, s, color.RGBA{255, 255, 0, 255}, 12)
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawString(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
|
||||||
drawStringSize(img, x, y, s, col, 14)
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawStringSize(img *image.RGBA, x, y int, s string, col color.RGBA, size int) {
|
|
||||||
face := soraFace14
|
|
||||||
if size <= 12 {
|
|
||||||
face = soraFace12
|
|
||||||
}
|
|
||||||
if face == nil {
|
|
||||||
return // font not loaded
|
|
||||||
}
|
|
||||||
d := &font.Drawer{
|
|
||||||
Dst: img,
|
|
||||||
Src: &image.Uniform{col},
|
|
||||||
Face: face,
|
|
||||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
|
||||||
}
|
|
||||||
d.DrawString(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
func measureString(s string, size int) int {
|
|
||||||
face := soraFace14
|
|
||||||
if size <= 12 {
|
|
||||||
face = soraFace12
|
|
||||||
}
|
|
||||||
if face == nil {
|
|
||||||
return len(s) * 8 // fallback
|
|
||||||
}
|
|
||||||
d := &font.Drawer{Face: face}
|
|
||||||
return d.MeasureString(s).Ceil()
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawStringBold(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
|
||||||
if soraBoldFace14 == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
d := &font.Drawer{
|
|
||||||
Dst: img,
|
|
||||||
Src: &image.Uniform{col},
|
|
||||||
Face: soraBoldFace14,
|
|
||||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
|
||||||
}
|
|
||||||
d.DrawString(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawStringBold20(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
|
||||||
if soraBoldFace20 == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
d := &font.Drawer{
|
|
||||||
Dst: img,
|
|
||||||
Src: &image.Uniform{col},
|
|
||||||
Face: soraBoldFace20,
|
|
||||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
|
||||||
}
|
|
||||||
d.DrawString(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
func measureStringBold(s string) int {
|
|
||||||
if soraBoldFace14 == nil {
|
|
||||||
return len(s) * 8
|
|
||||||
}
|
|
||||||
d := &font.Drawer{Face: soraBoldFace14}
|
|
||||||
return d.MeasureString(s).Ceil()
|
|
||||||
}
|
|
||||||
|
|
||||||
func loadSliceThumbWithPos(ctx *lib.AccessContext, e *lib.Entry, wcOverride, wwOverride float64, hasOverride bool) (image.Image, float64, float64, float64) {
|
|
||||||
// Parse window/level and position from entry data
|
|
||||||
var data struct {
|
|
||||||
WindowCenter float64 `json:"window_center"`
|
|
||||||
WindowWidth float64 `json:"window_width"`
|
|
||||||
PixelMin int `json:"pixel_min"`
|
|
||||||
PixelMax int `json:"pixel_max"`
|
|
||||||
SliceLocation float64 `json:"slice_location"`
|
|
||||||
}
|
|
||||||
json.Unmarshal([]byte(e.Data), &data)
|
|
||||||
|
|
||||||
var center, width float64
|
|
||||||
if hasOverride {
|
|
||||||
center = wcOverride
|
|
||||||
width = wwOverride
|
|
||||||
if width == 0 {
|
|
||||||
width = 1
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
center = data.WindowCenter
|
|
||||||
width = data.WindowWidth
|
|
||||||
if center == 0 && width == 0 {
|
|
||||||
center = float64(data.PixelMin+data.PixelMax) / 2
|
|
||||||
width = float64(data.PixelMax - data.PixelMin)
|
|
||||||
if width == 0 {
|
|
||||||
width = 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load and decompress 16-bit PNG using RBAC-enforced object access
|
|
||||||
decData, err := lib.ObjectRead(ctx, e.DossierID, e.EntryID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, 0, 0, 0
|
|
||||||
}
|
|
||||||
|
|
||||||
img, err := png.Decode(bytes.NewReader(decData))
|
|
||||||
if err != nil {
|
|
||||||
return nil, 0, 0, 0
|
|
||||||
}
|
|
||||||
|
|
||||||
bounds := img.Bounds()
|
|
||||||
var processed image.Image
|
|
||||||
|
|
||||||
switch src := img.(type) {
|
|
||||||
case *image.Gray16:
|
|
||||||
// Apply window/level to 8-bit
|
|
||||||
low := center - width/2
|
|
||||||
high := center + width/2
|
|
||||||
gray := image.NewGray(bounds)
|
|
||||||
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
|
|
||||||
for x := bounds.Min.X; x < bounds.Max.X; x++ {
|
|
||||||
v := float64(src.Gray16At(x, y).Y)
|
|
||||||
var out uint8
|
|
||||||
if v <= low {
|
|
||||||
out = 0
|
|
||||||
} else if v >= high {
|
|
||||||
out = 255
|
|
||||||
} else {
|
|
||||||
out = uint8((v - low) * 255 / width)
|
|
||||||
}
|
|
||||||
gray.SetGray(x, y, color.Gray{Y: out})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
processed = gray
|
|
||||||
|
|
||||||
case *image.RGBA, *image.NRGBA:
|
|
||||||
// RGB images: pass through (already rendered)
|
|
||||||
processed = src
|
|
||||||
|
|
||||||
default:
|
|
||||||
return nil, 0, 0, 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resize to thumbnail
|
|
||||||
thumb := image.NewRGBA(image.Rect(0, 0, thumbSize, thumbSize))
|
|
||||||
xdraw.BiLinear.Scale(thumb, thumb.Bounds(), processed, processed.Bounds(), xdraw.Over, nil)
|
|
||||||
|
|
||||||
return thumb, data.SliceLocation, center, width
|
|
||||||
}
|
|
||||||
|
|
||||||
func drawNumber(img *image.RGBA, x, y, num int) {
|
|
||||||
s := strconv.Itoa(num)
|
|
||||||
textWidth := measureString(s, 14) + 6
|
|
||||||
|
|
||||||
// Draw background
|
|
||||||
for dy := 0; dy < 18; dy++ {
|
|
||||||
for dx := 0; dx < textWidth; dx++ {
|
|
||||||
img.SetRGBA(x+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
drawStringSize(img, x+3, y+14, s, color.RGBA{255, 255, 0, 255}, 14)
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,10 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"image"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/chai2010/webp"
|
|
||||||
xdraw "golang.org/x/image/draw"
|
|
||||||
"inou/lib"
|
"inou/lib"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -31,31 +28,20 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
|
||||||
opts.WW, _ = strconv.ParseFloat(ww, 64)
|
opts.WW, _ = strconv.ParseFloat(ww, 64)
|
||||||
}
|
}
|
||||||
|
|
||||||
img, err := lib.ImageGet(ctx.AccessorID, hexID, opts)
|
|
||||||
if err != nil {
|
|
||||||
http.Error(w, "Image not found", http.StatusNotFound)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resize if either dimension exceeds maxDim (default 2000 for Claude API)
|
|
||||||
maxDim := 2000
|
maxDim := 2000
|
||||||
if md := r.URL.Query().Get("maxdim"); md != "" {
|
if md := r.URL.Query().Get("maxdim"); md != "" {
|
||||||
if v, err := strconv.Atoi(md); err == nil && v > 0 {
|
if v, err := strconv.Atoi(md); err == nil && v > 0 {
|
||||||
maxDim = v
|
maxDim = v
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
bounds := img.Bounds()
|
|
||||||
w0, h0 := bounds.Dx(), bounds.Dy()
|
body, err := lib.RenderImage(ctx.AccessorID, hexID, opts, maxDim)
|
||||||
if w0 > maxDim || h0 > maxDim {
|
if err != nil {
|
||||||
scale := float64(maxDim) / float64(max(w0, h0))
|
http.Error(w, "Image not found", http.StatusNotFound)
|
||||||
newW := int(float64(w0) * scale)
|
return
|
||||||
newH := int(float64(h0) * scale)
|
|
||||||
resized := image.NewRGBA(image.Rect(0, 0, newW, newH))
|
|
||||||
xdraw.BiLinear.Scale(resized, resized.Bounds(), img, bounds, xdraw.Over, nil)
|
|
||||||
img = resized
|
|
||||||
}
|
}
|
||||||
|
|
||||||
w.Header().Set("Content-Type", "image/webp")
|
w.Header().Set("Content-Type", "image/webp")
|
||||||
w.Header().Set("Cache-Control", "public, max-age=86400")
|
w.Header().Set("Cache-Control", "public, max-age=86400")
|
||||||
webp.Encode(w, img, &webp.Options{Lossless: true})
|
w.Write(body)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -122,6 +122,7 @@ func main() {
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
lib.ConfigInit()
|
lib.ConfigInit()
|
||||||
|
lib.RefDBInit("/tank/inou/data/reference.db")
|
||||||
fmt.Println("Normalizing test names...")
|
fmt.Println("Normalizing test names...")
|
||||||
if err := lib.Normalize(dossierID, lib.CategoryLab); err != nil {
|
if err := lib.Normalize(dossierID, lib.CategoryLab); err != nil {
|
||||||
fmt.Printf("Normalization failed: %v\n", err)
|
fmt.Printf("Normalization failed: %v\n", err)
|
||||||
|
|
@ -147,6 +148,7 @@ func main() {
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
lib.ConfigInit()
|
lib.ConfigInit()
|
||||||
|
lib.RefDBInit("/tank/inou/data/reference.db")
|
||||||
|
|
||||||
// Load existing lab entries for dedup
|
// Load existing lab entries for dedup
|
||||||
existing, err := lib.EntryQuery(nil, dossierID, lib.CategoryLab, "", "*")
|
existing, err := lib.EntryQuery(nil, dossierID, lib.CategoryLab, "", "*")
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,69 @@
|
||||||
|
# ChatGPT Actions Setup
|
||||||
|
|
||||||
|
Connect a ChatGPT Custom GPT to inou via OAuth 2.0 Actions.
|
||||||
|
|
||||||
|
## 1. Create the OAuth Client
|
||||||
|
|
||||||
|
SSH into staging or production and run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# One-time setup — creates the "chatgpt" OAuth client
|
||||||
|
ssh johan@192.168.1.253 '/tank/inou/bin/oauth-setup-chatgpt'
|
||||||
|
```
|
||||||
|
|
||||||
|
This prints the Client ID and Client Secret. **Save the secret** — it cannot be retrieved later.
|
||||||
|
|
||||||
|
> The binary doesn't exist yet. Either:
|
||||||
|
> - Add a `cmd/oauth-setup-chatgpt/main.go` (same pattern as `cmd/oauth-setup/main.go`), or
|
||||||
|
> - Call `CreateChatGPTClient()` from portal startup (like `EnsureBridgeClient()`).
|
||||||
|
|
||||||
|
## 2. Create the Custom GPT
|
||||||
|
|
||||||
|
In ChatGPT → Explore GPTs → Create:
|
||||||
|
|
||||||
|
1. **Name:** inou Health
|
||||||
|
2. **Instructions:** (your system prompt for health data analysis)
|
||||||
|
3. Click **Create new action**
|
||||||
|
|
||||||
|
## 3. Configure the Action
|
||||||
|
|
||||||
|
### Import Schema
|
||||||
|
|
||||||
|
Point to the hosted OpenAPI schema:
|
||||||
|
|
||||||
|
```
|
||||||
|
https://inou.com/api/docs/openapi.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
Or paste the contents of `docs/openapi.yaml` directly.
|
||||||
|
|
||||||
|
### Authentication
|
||||||
|
|
||||||
|
| Field | Value |
|
||||||
|
|-------|-------|
|
||||||
|
| **Authentication Type** | OAuth |
|
||||||
|
| **Client ID** | _(from step 1)_ |
|
||||||
|
| **Client Secret** | _(from step 1)_ |
|
||||||
|
| **Authorization URL** | `https://inou.com/oauth/authorize` |
|
||||||
|
| **Token URL** | `https://inou.com/oauth/token` |
|
||||||
|
| **Scope** | _(leave blank)_ |
|
||||||
|
| **Token Exchange Method** | Default (POST) |
|
||||||
|
|
||||||
|
### Privacy Policy URL
|
||||||
|
|
||||||
|
```
|
||||||
|
https://inou.com/privacy
|
||||||
|
```
|
||||||
|
|
||||||
|
## 4. Test
|
||||||
|
|
||||||
|
1. Save the GPT
|
||||||
|
2. Start a conversation: "List my dossiers"
|
||||||
|
3. ChatGPT will redirect to inou's OAuth login
|
||||||
|
4. After authorizing, the GPT can call the API
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- OAuth tokens are valid for 1 hour, with refresh token support
|
||||||
|
- All access is RBAC-enforced — the GPT can only see data the logged-in user has permission to view
|
||||||
|
- The OpenAPI schema excludes imaging endpoints (not useful for text-based ChatGPT interactions)
|
||||||
|
|
@ -0,0 +1,429 @@
|
||||||
|
openapi: 3.1.0
|
||||||
|
info:
|
||||||
|
title: inou Health API
|
||||||
|
version: 1.0.0
|
||||||
|
description: Access health data — dossiers, entries, labs, journals, trackers, and categories.
|
||||||
|
|
||||||
|
servers:
|
||||||
|
- url: https://inou.com
|
||||||
|
|
||||||
|
security:
|
||||||
|
- oauth2: []
|
||||||
|
|
||||||
|
components:
|
||||||
|
securitySchemes:
|
||||||
|
oauth2:
|
||||||
|
type: oauth2
|
||||||
|
flows:
|
||||||
|
authorizationCode:
|
||||||
|
authorizationUrl: https://inou.com/oauth/authorize
|
||||||
|
tokenUrl: https://inou.com/oauth/token
|
||||||
|
scopes: {}
|
||||||
|
|
||||||
|
schemas:
|
||||||
|
Error:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
error:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
Dossier:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
date_of_birth:
|
||||||
|
type: string
|
||||||
|
sex:
|
||||||
|
type: string
|
||||||
|
enum: [male, female, other]
|
||||||
|
categories:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
self:
|
||||||
|
type: boolean
|
||||||
|
description: True if this dossier belongs to the authenticated user.
|
||||||
|
|
||||||
|
DossierDetail:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
Entry:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
parent_id:
|
||||||
|
type: string
|
||||||
|
category:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
summary:
|
||||||
|
type: string
|
||||||
|
ordinal:
|
||||||
|
type: integer
|
||||||
|
timestamp:
|
||||||
|
type: integer
|
||||||
|
description: Unix timestamp (seconds).
|
||||||
|
|
||||||
|
EntryDetail:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
parent_id:
|
||||||
|
type: string
|
||||||
|
category:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
summary:
|
||||||
|
type: string
|
||||||
|
ordinal:
|
||||||
|
type: integer
|
||||||
|
timestamp:
|
||||||
|
type: integer
|
||||||
|
tags:
|
||||||
|
type: string
|
||||||
|
data:
|
||||||
|
type: object
|
||||||
|
description: Parsed JSON data (only when detail=full).
|
||||||
|
children:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
summary:
|
||||||
|
type: string
|
||||||
|
ordinal:
|
||||||
|
type: integer
|
||||||
|
|
||||||
|
Journal:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
summary:
|
||||||
|
type: string
|
||||||
|
timestamp:
|
||||||
|
type: integer
|
||||||
|
|
||||||
|
Tracker:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
category:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
question:
|
||||||
|
type: string
|
||||||
|
active:
|
||||||
|
type: boolean
|
||||||
|
dismissed:
|
||||||
|
type: boolean
|
||||||
|
time_of_day:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
Category:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: integer
|
||||||
|
key:
|
||||||
|
type: string
|
||||||
|
description: Machine-readable category name.
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: Translated display name.
|
||||||
|
types:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
paths:
|
||||||
|
/api/v1/dossiers:
|
||||||
|
get:
|
||||||
|
operationId: listDossiers
|
||||||
|
summary: List accessible dossiers
|
||||||
|
description: Returns all dossiers the authenticated user has access to, including their own.
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Array of dossiers.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/Dossier"
|
||||||
|
"401":
|
||||||
|
description: Unauthorized.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/dossiers/{dossier_id}:
|
||||||
|
get:
|
||||||
|
operationId: getDossier
|
||||||
|
summary: Get a single dossier
|
||||||
|
parameters:
|
||||||
|
- name: dossier_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Dossier detail.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/DossierDetail"
|
||||||
|
"403":
|
||||||
|
description: Access denied.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
"404":
|
||||||
|
description: Not found.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/dossiers/{dossier_id}/entries:
|
||||||
|
get:
|
||||||
|
operationId: listEntries
|
||||||
|
summary: List entries for a dossier
|
||||||
|
description: Query entries by category, type, date range, or parent. Returns summaries — use the single-entry endpoint with detail=full for complete data.
|
||||||
|
parameters:
|
||||||
|
- name: dossier_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: category
|
||||||
|
in: query
|
||||||
|
description: Filter by category name (e.g. "labs", "imaging", "medication").
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: type
|
||||||
|
in: query
|
||||||
|
description: Filter by entry type within the category.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: parent
|
||||||
|
in: query
|
||||||
|
description: Filter by parent entry ID (for navigating hierarchies).
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: search_key
|
||||||
|
in: query
|
||||||
|
description: Filter by search key (e.g. LOINC code for labs).
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: from
|
||||||
|
in: query
|
||||||
|
description: Start timestamp (Unix seconds).
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
- name: to
|
||||||
|
in: query
|
||||||
|
description: End timestamp (Unix seconds).
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
- name: limit
|
||||||
|
in: query
|
||||||
|
description: Maximum number of results.
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Array of entries.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/Entry"
|
||||||
|
"403":
|
||||||
|
description: Access denied.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/dossiers/{dossier_id}/entries/{entry_id}:
|
||||||
|
get:
|
||||||
|
operationId: getEntry
|
||||||
|
summary: Get a single entry with optional full detail
|
||||||
|
parameters:
|
||||||
|
- name: dossier_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: entry_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: detail
|
||||||
|
in: query
|
||||||
|
description: Set to "full" to include the data field and children.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [full]
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Entry with optional data and children.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/EntryDetail"
|
||||||
|
"404":
|
||||||
|
description: Not found.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/dossiers/{dossier_id}/journal:
|
||||||
|
get:
|
||||||
|
operationId: listJournals
|
||||||
|
summary: List journal entries
|
||||||
|
parameters:
|
||||||
|
- name: dossier_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: days
|
||||||
|
in: query
|
||||||
|
description: Look-back period in days (default 30).
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
- name: type
|
||||||
|
in: query
|
||||||
|
description: Filter by journal type.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Journal entries.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
journals:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/Journal"
|
||||||
|
"403":
|
||||||
|
description: Access denied.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/dossiers/{dossier_id}/trackers:
|
||||||
|
get:
|
||||||
|
operationId: listTrackers
|
||||||
|
summary: List tracker prompts
|
||||||
|
parameters:
|
||||||
|
- name: dossier_id
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: active
|
||||||
|
in: query
|
||||||
|
description: Set to "true" to return only active trackers.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: ["true"]
|
||||||
|
- name: category
|
||||||
|
in: query
|
||||||
|
description: Filter by category name.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: type
|
||||||
|
in: query
|
||||||
|
description: Filter by tracker type.
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Array of trackers.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/Tracker"
|
||||||
|
"403":
|
||||||
|
description: Access denied.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
||||||
|
/api/v1/categories:
|
||||||
|
get:
|
||||||
|
operationId: listCategories
|
||||||
|
summary: List all data categories
|
||||||
|
description: Returns all 28 categories with translated names and available types.
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Array of categories.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/Category"
|
||||||
|
|
||||||
|
/api/v1/categories/{name}/types:
|
||||||
|
get:
|
||||||
|
operationId: listCategoryTypes
|
||||||
|
summary: List types for a category
|
||||||
|
parameters:
|
||||||
|
- name: name
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
description: Category name (e.g. "labs", "imaging").
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: Array of type strings.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
"404":
|
||||||
|
description: Category not found.
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/Error"
|
||||||
|
|
@ -1,20 +1,23 @@
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
-- Auth Database Schema (auth.db)
|
-- Auth Database Schema (auth.db)
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
-- Separate from medical data. Contains volatile OAuth/session data.
|
-- Separate from medical data (inou.db). Volatile/ephemeral data.
|
||||||
-- Tables are NOT auto-created. Use this file manually if needed.
|
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
|
|
||||||
-- OAuth Clients (Claude, Flutter app, etc.)
|
-- Sessions table for secure session management
|
||||||
CREATE TABLE IF NOT EXISTS oauth_clients (
|
-- Tokens are random 32-byte base64url-encoded strings
|
||||||
client_id TEXT PRIMARY KEY,
|
CREATE TABLE IF NOT EXISTS sessions (
|
||||||
client_secret TEXT NOT NULL,
|
token TEXT PRIMARY KEY,
|
||||||
name TEXT NOT NULL,
|
dossier_id TEXT NOT NULL,
|
||||||
redirect_uris TEXT NOT NULL, -- JSON array
|
created_at INTEGER NOT NULL,
|
||||||
created_at INTEGER NOT NULL
|
expires_at INTEGER NOT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
-- OAuth Authorization Codes (short-lived, single-use)
|
-- Index for fast session lookup and cleanup
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_sessions_dossier ON sessions(dossier_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_sessions_expires ON sessions(expires_at);
|
||||||
|
|
||||||
|
-- OAuth authorization codes (PKCE, 10 min expiry)
|
||||||
CREATE TABLE IF NOT EXISTS oauth_codes (
|
CREATE TABLE IF NOT EXISTS oauth_codes (
|
||||||
code TEXT PRIMARY KEY,
|
code TEXT PRIMARY KEY,
|
||||||
client_id TEXT NOT NULL,
|
client_id TEXT NOT NULL,
|
||||||
|
|
@ -26,17 +29,27 @@ CREATE TABLE IF NOT EXISTS oauth_codes (
|
||||||
used INTEGER DEFAULT 0
|
used INTEGER DEFAULT 0
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_oauth_codes_client ON oauth_codes(client_id);
|
-- OAuth refresh tokens (30 day expiry)
|
||||||
|
|
||||||
-- OAuth Refresh Tokens (long-lived, rotatable)
|
|
||||||
CREATE TABLE IF NOT EXISTS oauth_refresh_tokens (
|
CREATE TABLE IF NOT EXISTS oauth_refresh_tokens (
|
||||||
token_id TEXT PRIMARY KEY,
|
token_id TEXT PRIMARY KEY,
|
||||||
client_id TEXT NOT NULL,
|
client_id TEXT NOT NULL,
|
||||||
dossier_id TEXT NOT NULL,
|
dossier_id TEXT NOT NULL,
|
||||||
|
created_at INTEGER NOT NULL,
|
||||||
expires_at INTEGER NOT NULL,
|
expires_at INTEGER NOT NULL,
|
||||||
revoked INTEGER DEFAULT 0,
|
revoked INTEGER DEFAULT 0
|
||||||
|
);
|
||||||
|
|
||||||
|
-- OAuth clients (registered applications)
|
||||||
|
CREATE TABLE IF NOT EXISTS oauth_clients (
|
||||||
|
client_id TEXT PRIMARY KEY,
|
||||||
|
client_secret TEXT,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
redirect_uris TEXT NOT NULL, -- JSON array
|
||||||
|
allowed_scopes TEXT, -- JSON array
|
||||||
created_at INTEGER NOT NULL
|
created_at INTEGER NOT NULL
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_oauth_refresh_dossier ON oauth_refresh_tokens(dossier_id);
|
-- Cleanup of expired rows. NOTE: these DELETEs run every time this script is executed; schedule them separately for ongoing cleanup.
|
||||||
CREATE INDEX IF NOT EXISTS idx_oauth_refresh_client ON oauth_refresh_tokens(client_id);
|
DELETE FROM sessions WHERE expires_at < strftime('%s', 'now');
|
||||||
|
DELETE FROM oauth_codes WHERE expires_at < strftime('%s', 'now');
|
||||||
|
DELETE FROM oauth_refresh_tokens WHERE expires_at < strftime('%s', 'now');
|
||||||
|
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,314 @@
|
||||||
|
package lib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LoincInfo mirrors one row of the loinc_lab reference table: the official
// LOINC record for a single lab observation.
type LoincInfo struct {
	Code      string `db:"loinc_num"`  // LOINC identifier, e.g. "718-7"
	LongName  string `db:"long_name"`  // full official name
	ShortName string `db:"short_name"` // compact name, e.g. "Hgb Bld-mCnc"
	Component string `db:"component"`  // analyte measured
	System    string `db:"system"`     // specimen system, e.g. "Bld"
	Property  string `db:"property"`   // kind of quantity, e.g. "NCnc"
}
|
||||||
|
|
||||||
|
// LoincGet returns official LOINC info from loinc_lab.
|
||||||
|
func LoincGet(code string) *LoincInfo {
|
||||||
|
var results []LoincInfo
|
||||||
|
RefQuery("SELECT loinc_num, long_name, short_name, component, system, property FROM loinc_lab WHERE loinc_num = ?", []any{code}, &results)
|
||||||
|
if len(results) > 0 {
|
||||||
|
return &results[0]
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoincAbbr derives a short abbreviation from loinc_lab.short_name.
|
||||||
|
// Examples: "Hgb Bld-mCnc" → "Hgb", "Neutrophils/leuk NFr Bld" → "Neut"
|
||||||
|
func LoincAbbr(info *LoincInfo) string {
|
||||||
|
if info == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
s := info.ShortName
|
||||||
|
// Take first token (before space), strip trailing /... for differentials
|
||||||
|
if i := strings.IndexByte(s, ' '); i > 0 {
|
||||||
|
s = s[:i]
|
||||||
|
}
|
||||||
|
if i := strings.IndexByte(s, '/'); i > 0 {
|
||||||
|
s = s[:i]
|
||||||
|
}
|
||||||
|
// Common abbreviation overrides
|
||||||
|
overrides := map[string]string{
|
||||||
|
"Neutrophils": "Neut", "Lymphocytes": "Lymph", "Monocytes": "Mono",
|
||||||
|
"Eosinophils": "Eos", "Basophils": "Baso", "Platelets": "PLT",
|
||||||
|
"Leukocytes": "WBC", "Erythrocytes": "RBC", "Hemoglobin": "Hgb",
|
||||||
|
"Hematocrit": "Hct", "Glucose": "Glu", "Creatinine": "Cr",
|
||||||
|
"Sodium": "Na", "Potassium": "K", "Chloride": "Cl",
|
||||||
|
"Calcium": "Ca", "Albumin": "Alb", "Phosphate": "Phos",
|
||||||
|
"Magnesium": "Mg",
|
||||||
|
}
|
||||||
|
// Match on component first word for overrides
|
||||||
|
comp := info.Component
|
||||||
|
if i := strings.IndexAny(comp, "/."); i > 0 {
|
||||||
|
comp = comp[:i]
|
||||||
|
}
|
||||||
|
if abbr, ok := overrides[comp]; ok {
|
||||||
|
return abbr
|
||||||
|
}
|
||||||
|
// Truncate long abbreviations
|
||||||
|
if len(s) > 8 {
|
||||||
|
s = s[:8]
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoincLookup resolves a hospital test name to a LOINC code.
|
||||||
|
// Checks cache first; on miss, uses Gemini expand → search → pick → cache.
|
||||||
|
// Returns empty string if lookup fails (no candidates, LLM error, etc).
|
||||||
|
func LoincLookup(name, specimen, unit string) string {
|
||||||
|
// 1. Check cache
|
||||||
|
cacheKey := strings.ToLower(name + "|" + specimen + "|" + unit)
|
||||||
|
var cached []struct {
|
||||||
|
LoincCode string `db:"loinc_code"`
|
||||||
|
}
|
||||||
|
RefQuery("SELECT loinc_code FROM loinc_cache WHERE cache_key = ?", []any{cacheKey}, &cached)
|
||||||
|
if len(cached) > 0 {
|
||||||
|
return cached[0].LoincCode
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. No Gemini key = can't do LLM lookup
|
||||||
|
if GeminiKey == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Expand + search + pick
|
||||||
|
lookupUnit := unit
|
||||||
|
if lookupUnit == "%" {
|
||||||
|
lookupUnit = "percentage"
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens := loincTokenize(name + " " + specimen + " " + lookupUnit)
|
||||||
|
if expanded, err := loincExpand(name, specimen, lookupUnit); err == nil {
|
||||||
|
tokens = expanded
|
||||||
|
}
|
||||||
|
|
||||||
|
candidates := loincSearch(tokens)
|
||||||
|
|
||||||
|
// Filter: if unit is %, drop count codes (NCnc)
|
||||||
|
if unit == "%" {
|
||||||
|
var filtered []LoincInfo
|
||||||
|
for _, c := range candidates {
|
||||||
|
if c.Property != "NCnc" {
|
||||||
|
filtered = append(filtered, c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(filtered) > 0 {
|
||||||
|
candidates = filtered
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(candidates) == 0 {
|
||||||
|
log.Printf("loinc-lookup: no candidates for %q", name)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. LLM pick
|
||||||
|
code, lname, err := loincPick(name, specimen, lookupUnit, candidates)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("loinc-lookup: pick failed for %q: %v", name, err)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Cache
|
||||||
|
RefExec(`INSERT OR REPLACE INTO loinc_cache (cache_key, input_name, input_specimen, input_unit, loinc_code, loinc_name, confidence)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, 'llm')`, cacheKey, name, specimen, unit, code, lname)
|
||||||
|
|
||||||
|
return code
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- internal helpers ---
|
||||||
|
|
||||||
|
// loincTokenize lowercases s, treats common punctuation as whitespace, and
// returns the unique tokens of length >= 2 in first-seen order.
func loincTokenize(s string) []string {
	cleaned := strings.Map(func(r rune) rune {
		switch r {
		case ',', ';', '(', ')', '[', ']', '/', '-', '.', ':':
			return ' '
		}
		return r
	}, strings.ToLower(s))

	var out []string
	used := make(map[string]bool)
	for _, tok := range strings.Fields(cleaned) {
		if len(tok) >= 2 && !used[tok] {
			used[tok] = true
			out = append(out, tok)
		}
	}
	return out
}
|
||||||
|
|
||||||
|
func loincSearch(tokens []string) []LoincInfo {
|
||||||
|
if len(tokens) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
type entry struct {
|
||||||
|
c LoincInfo
|
||||||
|
hits int
|
||||||
|
}
|
||||||
|
entries := map[string]*entry{}
|
||||||
|
|
||||||
|
for _, t := range tokens {
|
||||||
|
pattern := "%" + t + "%"
|
||||||
|
query := "SELECT loinc_num, long_name, short_name, system, component, property FROM loinc_lab WHERE " +
|
||||||
|
"LOWER(long_name) LIKE ? OR LOWER(short_name) LIKE ? OR LOWER(component) LIKE ?"
|
||||||
|
var results []LoincInfo
|
||||||
|
RefQuery(query, []any{pattern, pattern, pattern}, &results)
|
||||||
|
for _, c := range results {
|
||||||
|
if e, ok := entries[c.Code]; ok {
|
||||||
|
e.hits++
|
||||||
|
} else {
|
||||||
|
entries[c.Code] = &entry{c: c, hits: 1}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
minHits := 2
|
||||||
|
if len(tokens) <= 1 {
|
||||||
|
minHits = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
type scored struct {
|
||||||
|
c LoincInfo
|
||||||
|
score int
|
||||||
|
}
|
||||||
|
var scoredResults []scored
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.hits < minHits {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
s := e.hits * 100
|
||||||
|
compLen := len(e.c.Component)
|
||||||
|
if compLen > 0 && compLen < 50 {
|
||||||
|
s += 50 - compLen
|
||||||
|
}
|
||||||
|
if !strings.Contains(e.c.Component, "/") {
|
||||||
|
s += 20
|
||||||
|
}
|
||||||
|
scoredResults = append(scoredResults, scored{e.c, s})
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := range scoredResults {
|
||||||
|
for j := i + 1; j < len(scoredResults); j++ {
|
||||||
|
if scoredResults[j].score > scoredResults[i].score {
|
||||||
|
scoredResults[i], scoredResults[j] = scoredResults[j], scoredResults[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var top []LoincInfo
|
||||||
|
for i, s := range scoredResults {
|
||||||
|
if i >= 30 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
top = append(top, s.c)
|
||||||
|
}
|
||||||
|
return top
|
||||||
|
}
|
||||||
|
|
||||||
|
func loincExpand(name, specimen, unit string) ([]string, error) {
|
||||||
|
prompt := fmt.Sprintf(`Given a lab test, return search terms to find it in the LOINC database.
|
||||||
|
LOINC uses formal medical terminology (e.g. "Leukocytes" not "White Blood Cells", "Erythrocytes" not "Red Blood Cells", "Oxygen" not "O2" or "pO2").
|
||||||
|
|
||||||
|
Lab test:
|
||||||
|
Name: %s
|
||||||
|
Specimen: %s
|
||||||
|
Unit: %s
|
||||||
|
|
||||||
|
Return a JSON object: {"terms": ["term1", "term2", ...]}
|
||||||
|
Include: the LOINC component name, specimen system code (e.g. Bld, BldA, BldC, BldV, Ser/Plas, Urine), and any synonyms that might appear in LOINC long names.
|
||||||
|
Keep it to 3-6 terms. JSON only.`, name, specimen, unit)
|
||||||
|
|
||||||
|
resp, err := CallGemini(prompt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var result struct {
|
||||||
|
Terms []string `json:"terms"`
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||||
|
return nil, fmt.Errorf("parse expand response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var terms []string
|
||||||
|
seen := map[string]bool{}
|
||||||
|
for _, t := range result.Terms {
|
||||||
|
t = strings.ToLower(strings.TrimSpace(t))
|
||||||
|
if t != "" && !seen[t] {
|
||||||
|
terms = append(terms, t)
|
||||||
|
seen[t] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, t := range loincTokenize(name + " " + specimen) {
|
||||||
|
if !seen[t] {
|
||||||
|
terms = append(terms, t)
|
||||||
|
seen[t] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return terms, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func loincPick(name, specimen, unit string, candidates []LoincInfo) (string, string, error) {
|
||||||
|
var lines []string
|
||||||
|
for i, c := range candidates {
|
||||||
|
display := c.LongName
|
||||||
|
display = strings.ReplaceAll(display, "/100 ", "percentage of ")
|
||||||
|
display = strings.ReplaceAll(display, "fraction", "percentage")
|
||||||
|
lines = append(lines, fmt.Sprintf("%d. %s — %s [System: %s]", i+1, c.Code, display, c.System))
|
||||||
|
}
|
||||||
|
|
||||||
|
prompt := fmt.Sprintf(`You are a clinical laboratory informatics system. Given a lab test, pick the BEST matching LOINC code from the candidate list.
|
||||||
|
|
||||||
|
Lab test:
|
||||||
|
Name: %s
|
||||||
|
Specimen: %s
|
||||||
|
Unit: %s
|
||||||
|
|
||||||
|
Candidates:
|
||||||
|
%s
|
||||||
|
|
||||||
|
Return ONLY a JSON object: {"pick": <number>, "loinc": "<code>", "name": "<long name>"}
|
||||||
|
Pick the candidate that best matches the test name, specimen type, and unit. If none match well, pick the closest.
|
||||||
|
JSON only, no explanation.`, name, specimen, unit, strings.Join(lines, "\n"))
|
||||||
|
|
||||||
|
resp, err := CallGemini(prompt)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", fmt.Errorf("LLM call failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var result struct {
|
||||||
|
Pick int `json:"pick"`
|
||||||
|
Loinc string `json:"loinc"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||||
|
return "", "", fmt.Errorf("parse LLM response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.Loinc == "" && result.Pick > 0 && result.Pick <= len(candidates) {
|
||||||
|
result.Loinc = candidates[result.Pick-1].Code
|
||||||
|
result.Name = candidates[result.Pick-1].LongName
|
||||||
|
}
|
||||||
|
for _, c := range candidates {
|
||||||
|
if c.Code == result.Loinc {
|
||||||
|
return result.Loinc, c.LongName, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if result.Pick > 0 && result.Pick <= len(candidates) {
|
||||||
|
c := candidates[result.Pick-1]
|
||||||
|
return c.Code, c.LongName, nil
|
||||||
|
}
|
||||||
|
return "", "", fmt.Errorf("LLM returned %q (pick %d) — not in %d candidates", result.Loinc, result.Pick, len(candidates))
|
||||||
|
}
|
||||||
271
lib/normalize.go
271
lib/normalize.go
|
|
@ -4,189 +4,129 @@ import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
"log"
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Normalize normalizes entry names within a dossier for a given category.
|
// Normalize resolves hospital test names to official LOINC codes and updates entries.
|
||||||
// Uses heuristic pre-grouping + LLM to map variant names to canonical forms.
|
// Flow: hospital name + specimen + unit → LOINC (via cache or Gemini lookup) → official name from loinc_lab.
|
||||||
// Updates Summary (display) and Data JSON (normalized_name, abbreviation).
|
// No Fireworks LLM. Original Type field is never modified.
|
||||||
// Original Type field is never modified.
|
|
||||||
// Silently returns nil if no API key is configured.
|
|
||||||
func Normalize(dossierID string, category int, progress ...func(processed, total int)) error {
|
func Normalize(dossierID string, category int, progress ...func(processed, total int)) error {
|
||||||
reportProgress := func(p, t int) {
|
reportProgress := func(p, t int) {
|
||||||
if len(progress) > 0 && progress[0] != nil {
|
if len(progress) > 0 && progress[0] != nil {
|
||||||
progress[0](p, t)
|
progress[0](p, t)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if FireworksKey == "" {
|
|
||||||
SendSignal("normalize: FIREWORKS_API_KEY not configured, skipping normalization")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// 1. Load all entries, collect types only from entries that need normalization
|
// 1. Load all entries, build parent map for specimen lookup
|
||||||
entries, err := EntryQueryOld(dossierID, category, "")
|
entries, err := EntryQueryOld(dossierID, category, "")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("load entries: %w", err)
|
return fmt.Errorf("load entries: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
seen := make(map[string]bool)
|
parentMap := make(map[string]*Entry)
|
||||||
var allNames []string
|
for _, e := range entries {
|
||||||
|
if e.ParentID == "" || e.Type == "lab_order" {
|
||||||
|
parentMap[e.EntryID] = e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Collect unique type|specimen|unit combos, resolve each to LOINC
|
||||||
|
type testKey struct {
|
||||||
|
name, specimen, unit string
|
||||||
|
}
|
||||||
|
type resolved struct {
|
||||||
|
loinc string
|
||||||
|
info *LoincInfo
|
||||||
|
abbr string
|
||||||
|
}
|
||||||
|
cache := make(map[testKey]*resolved)
|
||||||
|
var lookupCount, cacheHits, misses int
|
||||||
|
|
||||||
for _, e := range entries {
|
for _, e := range entries {
|
||||||
if e.ParentID == "" || e.Type == "lab_order" || e.Type == "" {
|
if e.ParentID == "" || e.Type == "lab_order" || e.Type == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
// FIXED(review-2026-02-28): Skip only if FULLY normalized (has both SearchKey2 AND LOINC)
|
var data map[string]interface{}
|
||||||
// Previously skipped on SearchKey2 alone, causing LOINC to never be populated
|
json.Unmarshal([]byte(e.Data), &data)
|
||||||
if e.SearchKey2 != "" {
|
unit, _ := data["unit"].(string)
|
||||||
var data map[string]interface{}
|
|
||||||
json.Unmarshal([]byte(e.Data), &data)
|
|
||||||
if loinc, ok := data["loinc"].(string); ok && loinc != "" {
|
|
||||||
continue // fully normalized
|
|
||||||
}
|
|
||||||
// Has SearchKey2 but no LOINC - needs normalization
|
|
||||||
}
|
|
||||||
if !seen[e.Type] {
|
|
||||||
seen[e.Type] = true
|
|
||||||
allNames = append(allNames, e.Type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(allNames) == 0 {
|
specimen := ""
|
||||||
log.Printf("normalize: all entries already normalized")
|
if parent, ok := parentMap[e.ParentID]; ok {
|
||||||
return nil
|
var pdata map[string]interface{}
|
||||||
}
|
if json.Unmarshal([]byte(parent.Data), &pdata) == nil {
|
||||||
|
specimen, _ = pdata["specimen"].(string)
|
||||||
// 2. Pre-group by heuristic key (strip POCT, specimen suffixes, normalize case)
|
|
||||||
groups := make(map[string][]string) // cleanKey → [original names]
|
|
||||||
for _, name := range allNames {
|
|
||||||
key := normalizeKey(name)
|
|
||||||
groups[key] = append(groups[key], name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send just the group keys to LLM
|
|
||||||
keys := make([]string, 0, len(groups))
|
|
||||||
for k := range groups {
|
|
||||||
keys = append(keys, k)
|
|
||||||
}
|
|
||||||
sort.Strings(keys)
|
|
||||||
|
|
||||||
log.Printf("normalize: %d unique types → %d groups after pre-grouping", len(allNames), len(keys))
|
|
||||||
|
|
||||||
// 3. Call LLM with group keys (batched to stay within token limits)
|
|
||||||
mapping := make(map[string]normMapping)
|
|
||||||
batchSize := 50
|
|
||||||
for i := 0; i < len(keys); i += batchSize {
|
|
||||||
end := i + batchSize
|
|
||||||
if end > len(keys) {
|
|
||||||
end = len(keys)
|
|
||||||
}
|
|
||||||
batch := keys[i:end]
|
|
||||||
reportProgress(end, len(keys))
|
|
||||||
log.Printf("normalize: LLM batch %d-%d of %d", i+1, end, len(keys))
|
|
||||||
|
|
||||||
batchMap, err := callNormalizeLLM(batch)
|
|
||||||
if err != nil {
|
|
||||||
SendSignal(fmt.Sprintf("normalize: LLM batch %d-%d failed: %v", i+1, end, err))
|
|
||||||
return fmt.Errorf("LLM batch %d-%d: %w", i+1, end, err)
|
|
||||||
}
|
|
||||||
for k, v := range batchMap {
|
|
||||||
mapping[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 4. Expand: each original name in a group gets the group's canonical mapping
|
|
||||||
fullMapping := make(map[string]normMapping)
|
|
||||||
for key, origNames := range groups {
|
|
||||||
if m, ok := mapping[key]; ok {
|
|
||||||
for _, orig := range origNames {
|
|
||||||
fullMapping[orig] = m
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
log.Printf("normalize: LLM mapped %d groups → %d original names covered", len(mapping), len(fullMapping))
|
tk := testKey{e.Type, specimen, unit}
|
||||||
|
if _, ok := cache[tk]; ok {
|
||||||
// 5. Save LabTest entries for any new LOINC codes
|
|
||||||
seenLoinc := make(map[string]bool)
|
|
||||||
var labTests []LabTest
|
|
||||||
for _, m := range fullMapping {
|
|
||||||
if m.Loinc == "" || seenLoinc[m.Loinc] {
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
seenLoinc[m.Loinc] = true
|
lookupCount++
|
||||||
dir := m.Direction
|
|
||||||
if dir == "" {
|
loinc := LoincLookup(e.Type, specimen, unit)
|
||||||
dir = DirRange
|
if loinc == "" {
|
||||||
|
cache[tk] = &resolved{}
|
||||||
|
misses++
|
||||||
|
continue
|
||||||
}
|
}
|
||||||
factor := m.SIFactor
|
|
||||||
if factor == 0 {
|
info := LoincGet(loinc)
|
||||||
factor = 1.0
|
abbr := LoincAbbr(info)
|
||||||
|
cache[tk] = &resolved{loinc: loinc, info: info, abbr: abbr}
|
||||||
|
if info != nil {
|
||||||
|
cacheHits++
|
||||||
}
|
}
|
||||||
labTests = append(labTests, LabTest{
|
|
||||||
LoincID: m.Loinc,
|
|
||||||
Name: m.Name,
|
|
||||||
SIUnit: m.SIUnit,
|
|
||||||
Direction: dir,
|
|
||||||
SIFactor: ToLabScale(factor),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
for _, t := range labTests {
|
|
||||||
RefExec(`INSERT OR IGNORE INTO lab_test (loinc_id, name, si_unit, direction, si_factor) VALUES (?, ?, ?, ?, ?)`,
|
|
||||||
t.LoincID, t.Name, t.SIUnit, t.Direction, t.SIFactor)
|
|
||||||
}
|
|
||||||
if len(labTests) > 0 {
|
|
||||||
log.Printf("normalize: saved %d lab tests", len(labTests))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 5. Apply mapping to loaded entries, save only changed ones
|
reportProgress(lookupCount, lookupCount)
|
||||||
|
log.Printf("normalize: %d unique combos, %d resolved, %d unresolved", lookupCount, cacheHits, misses)
|
||||||
|
|
||||||
|
// 3. Apply to entries
|
||||||
var toSave []Entry
|
var toSave []Entry
|
||||||
for _, e := range entries {
|
for _, e := range entries {
|
||||||
if e.ParentID == "" {
|
if e.ParentID == "" || e.Type == "lab_order" || e.Type == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
norm, ok := fullMapping[e.Type]
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
var data map[string]interface{}
|
var data map[string]interface{}
|
||||||
if json.Unmarshal([]byte(e.Data), &data) != nil {
|
if json.Unmarshal([]byte(e.Data), &data) != nil {
|
||||||
data = make(map[string]interface{})
|
data = make(map[string]interface{})
|
||||||
}
|
}
|
||||||
|
unit, _ := data["unit"].(string)
|
||||||
|
|
||||||
// Skip if already fully normalized
|
specimen := ""
|
||||||
existingName, _ := data["normalized_name"].(string)
|
if parent, ok := parentMap[e.ParentID]; ok {
|
||||||
existingLoinc, _ := data["loinc"].(string)
|
var pdata map[string]interface{}
|
||||||
needsSearchKey := (norm.Loinc != "" && e.SearchKey == "")
|
if json.Unmarshal([]byte(parent.Data), &pdata) == nil {
|
||||||
needsSearchKey2 := e.SearchKey2 == ""
|
specimen, _ = pdata["specimen"].(string)
|
||||||
if existingName == norm.Name && (norm.Loinc == "" || existingLoinc == norm.Loinc) && !needsSearchKey && !needsSearchKey2 {
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r := cache[testKey{e.Type, specimen, unit}]
|
||||||
|
if r == nil || r.loinc == "" || r.info == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
data["normalized_name"] = norm.Name
|
// Check if already up to date
|
||||||
data["abbreviation"] = norm.Abbr
|
existingLoinc, _ := data["loinc"].(string)
|
||||||
if norm.Loinc != "" {
|
existingName, _ := data["normalized_name"].(string)
|
||||||
data["loinc"] = norm.Loinc
|
if existingLoinc == r.loinc && existingName == r.info.LongName && e.SearchKey == r.loinc && e.SearchKey2 != "" {
|
||||||
}
|
continue
|
||||||
if norm.SIUnit != "" {
|
|
||||||
data["si_unit"] = norm.SIUnit
|
|
||||||
}
|
|
||||||
if norm.SIFactor != 0 && norm.SIFactor != 1.0 {
|
|
||||||
data["si_factor"] = norm.SIFactor
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update Data JSON
|
||||||
|
data["loinc"] = r.loinc
|
||||||
|
data["normalized_name"] = r.info.LongName
|
||||||
|
data["abbreviation"] = r.abbr
|
||||||
b, _ := json.Marshal(data)
|
b, _ := json.Marshal(data)
|
||||||
e.Data = string(b)
|
e.Data = string(b)
|
||||||
|
|
||||||
// Update SearchKey with LOINC code, SearchKey2 with canonical test name
|
// Update search keys
|
||||||
if norm.Loinc != "" {
|
e.SearchKey = r.loinc
|
||||||
e.SearchKey = norm.Loinc
|
e.SearchKey2 = strings.ToLower(r.info.LongName)
|
||||||
}
|
|
||||||
e.SearchKey2 = strings.ToLower(norm.Name)
|
|
||||||
|
|
||||||
// Rebuild Summary: "Abbr: value unit"
|
// Rebuild Summary: "Abbr: value unit"
|
||||||
unit, _ := data["unit"].(string)
|
summary := r.abbr + ": " + e.Value
|
||||||
summary := norm.Abbr + ": " + e.Value
|
|
||||||
if unit != "" {
|
if unit != "" {
|
||||||
summary += " " + unit
|
summary += " " + unit
|
||||||
}
|
}
|
||||||
|
|
@ -207,62 +147,3 @@ func Normalize(dossierID string, category int, progress ...func(processed, total
|
||||||
}
|
}
|
||||||
return EntryWrite("", ptrs...)
|
return EntryWrite("", ptrs...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// normalizeKey reduces a test name to a heuristic grouping key.
|
|
||||||
// Groups obvious duplicates: POCT variants, specimen suffixes, case.
|
|
||||||
func normalizeKey(name string) string {
|
|
||||||
s := strings.ToLower(strings.TrimSpace(name))
|
|
||||||
s = strings.TrimPrefix(s, "poct ")
|
|
||||||
// Strip specimen-type suffixes only (not qualifiers like ", total", ", direct")
|
|
||||||
for _, suf := range []string{", whole blood", ", wblood", ", wb", ", wbl", ", blood", ", s/p", " ach"} {
|
|
||||||
s = strings.TrimSuffix(s, suf)
|
|
||||||
}
|
|
||||||
return strings.TrimSpace(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
type normMapping struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Abbr string `json:"abbr"`
|
|
||||||
Loinc string `json:"loinc"`
|
|
||||||
SIUnit string `json:"si_unit"`
|
|
||||||
SIFactor float64 `json:"si_factor"`
|
|
||||||
Direction string `json:"direction"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func callNormalizeLLM(names []string) (map[string]normMapping, error) {
|
|
||||||
nameList := strings.Join(names, "\n")
|
|
||||||
|
|
||||||
prompt := fmt.Sprintf(`Normalize these medical test names. Return ONLY a JSON object, no explanation.
|
|
||||||
|
|
||||||
Each key is the EXACT input name. Value format: {"name":"Canonical Name","abbr":"Abbreviation","loinc":"LOINC","si_unit":"unit","si_factor":1.0,"direction":"range"}
|
|
||||||
|
|
||||||
Key LOINC codes: WBC=6690-2, RBC=789-8, Hemoglobin=718-7, Hematocrit=4544-3, MCV=787-2, MCH=785-6, MCHC=786-4, RDW=788-0, Platelets=777-3, Neutrophils%%=770-8, Lymphocytes%%=736-9, Monocytes%%=5905-5, Eosinophils%%=713-8, Basophils%%=706-2, Glucose=2345-7, BUN=3094-0, Creatinine=2160-0, Sodium=2951-2, Potassium=2823-3, Chloride=2075-0, CO2=2028-9, Calcium=17861-6, Total Protein=2885-2, Albumin=1751-7, Total Bilirubin=1975-2, ALP=6768-6, AST=1920-8, ALT=1742-6.
|
|
||||||
|
|
||||||
Abbreviations: WBC, RBC, Hgb, Hct, MCV, MCH, MCHC, RDW, PLT, Neut, Lymph, Mono, Eos, Baso, Glu, BUN, Cr, Na, K, Cl, CO2, Ca, TP, Alb, Bili, ALP, AST, ALT, Mg, Phos, Fe, etc.
|
|
||||||
si_factor: conventional→SI multiplier (e.g. Hgb g/dL→g/L=10.0). Use 1.0 if same or unknown.
|
|
||||||
direction: "range" (default), "lower_better" (CRP, LDL, glucose), "higher_better" (HDL).
|
|
||||||
|
|
||||||
Test names:
|
|
||||||
%s`, nameList)
|
|
||||||
|
|
||||||
messages := []map[string]interface{}{
|
|
||||||
{"role": "user", "content": prompt},
|
|
||||||
}
|
|
||||||
resp, err := CallFireworks("accounts/fireworks/models/qwen3-vl-30b-a3b-instruct", messages, 4096)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
resp = strings.TrimSpace(resp)
|
|
||||||
resp = strings.TrimPrefix(resp, "```json")
|
|
||||||
resp = strings.TrimPrefix(resp, "```")
|
|
||||||
resp = strings.TrimSuffix(resp, "```")
|
|
||||||
resp = strings.TrimSpace(resp)
|
|
||||||
|
|
||||||
var mapping map[string]normMapping
|
|
||||||
if err := json.Unmarshal([]byte(resp), &mapping); err != nil {
|
|
||||||
return nil, fmt.Errorf("parse response: %w (first 500 chars: %.500s)", err, resp)
|
|
||||||
}
|
|
||||||
|
|
||||||
return mapping, nil
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,374 @@
|
||||||
|
package lib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
_ "embed"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"image"
|
||||||
|
"image/color"
|
||||||
|
"image/draw"
|
||||||
|
"image/png"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/chai2010/webp"
|
||||||
|
"golang.org/x/image/font"
|
||||||
|
"golang.org/x/image/font/opentype"
|
||||||
|
"golang.org/x/image/math/fixed"
|
||||||
|
|
||||||
|
xdraw "golang.org/x/image/draw"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed Sora-Regular.ttf
|
||||||
|
var soraRegularData []byte
|
||||||
|
|
||||||
|
//go:embed Sora-SemiBold.ttf
|
||||||
|
var soraSemiBoldData []byte
|
||||||
|
|
||||||
|
var (
|
||||||
|
soraFace14 font.Face
|
||||||
|
soraFace12 font.Face
|
||||||
|
soraBoldFace14 font.Face
|
||||||
|
soraBoldFace20 font.Face
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
regular, err := opentype.Parse(soraRegularData)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
semibold, err := opentype.Parse(soraSemiBoldData)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
soraFace14, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 14, DPI: 72})
|
||||||
|
soraFace12, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 12, DPI: 72})
|
||||||
|
soraBoldFace14, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 14, DPI: 72})
|
||||||
|
soraBoldFace20, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 20, DPI: 72})
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderImage returns a DICOM slice as webp bytes, resized to maxDim.
|
||||||
|
func RenderImage(accessorID, id string, opts *ImageOpts, maxDim int) ([]byte, error) {
|
||||||
|
img, err := ImageGet(accessorID, id, opts)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if maxDim <= 0 {
|
||||||
|
maxDim = 2000
|
||||||
|
}
|
||||||
|
bounds := img.Bounds()
|
||||||
|
w0, h0 := bounds.Dx(), bounds.Dy()
|
||||||
|
if w0 > maxDim || h0 > maxDim {
|
||||||
|
scale := float64(maxDim) / float64(max(w0, h0))
|
||||||
|
newW := int(float64(w0) * scale)
|
||||||
|
newH := int(float64(h0) * scale)
|
||||||
|
resized := image.NewRGBA(image.Rect(0, 0, newW, newH))
|
||||||
|
xdraw.BiLinear.Scale(resized, resized.Bounds(), img, bounds, xdraw.Over, nil)
|
||||||
|
img = resized
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := webp.Encode(&buf, img, &webp.Options{Lossless: true}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderContactSheet returns a contact sheet webp for a series.
|
||||||
|
func RenderContactSheet(accessorID, seriesID string, wc, ww float64) ([]byte, error) {
|
||||||
|
series, err := entryGetByID(accessorID, seriesID)
|
||||||
|
if err != nil || series == nil {
|
||||||
|
return nil, fmt.Errorf("series not found")
|
||||||
|
}
|
||||||
|
dossierID := series.DossierID
|
||||||
|
seriesDesc := series.Tags
|
||||||
|
var seriesData struct {
|
||||||
|
Modality string `json:"modality"`
|
||||||
|
}
|
||||||
|
json.Unmarshal([]byte(series.Data), &seriesData)
|
||||||
|
|
||||||
|
study, err := entryGetByID("", series.ParentID)
|
||||||
|
if err != nil || study == nil {
|
||||||
|
return nil, fmt.Errorf("study not found")
|
||||||
|
}
|
||||||
|
var studyData struct {
|
||||||
|
StudyDate string `json:"study_date"`
|
||||||
|
StudyDesc string `json:"study_description"`
|
||||||
|
}
|
||||||
|
json.Unmarshal([]byte(study.Data), &studyData)
|
||||||
|
|
||||||
|
dossier, _ := DossierGet("", dossierID)
|
||||||
|
patientName := ""
|
||||||
|
if dossier != nil {
|
||||||
|
patientName = dossier.Name
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := EntryChildrenByType(dossierID, seriesID, "slice")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(entries) == 0 {
|
||||||
|
return nil, fmt.Errorf("no slices found")
|
||||||
|
}
|
||||||
|
|
||||||
|
var firstSliceData struct {
|
||||||
|
SliceThickness float64 `json:"slice_thickness"`
|
||||||
|
}
|
||||||
|
json.Unmarshal([]byte(entries[0].Data), &firstSliceData)
|
||||||
|
step := csStepSize(5.0, firstSliceData.SliceThickness)
|
||||||
|
|
||||||
|
hasOverride := wc != 0 || ww != 0
|
||||||
|
|
||||||
|
type thumbInfo struct {
|
||||||
|
img image.Image
|
||||||
|
sliceNum int
|
||||||
|
pos float64
|
||||||
|
entryID string
|
||||||
|
}
|
||||||
|
var thumbs []thumbInfo
|
||||||
|
var usedWC, usedWW float64
|
||||||
|
for i, e := range entries {
|
||||||
|
if (i+1)%step != 1 && step != 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
thumb, pos, twc, tww := csLoadThumb(accessorID, e, wc, ww, hasOverride)
|
||||||
|
if thumb != nil {
|
||||||
|
thumbs = append(thumbs, thumbInfo{img: thumb, sliceNum: i + 1, pos: pos, entryID: e.EntryID})
|
||||||
|
if usedWC == 0 && usedWW == 0 {
|
||||||
|
usedWC, usedWW = twc, tww
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(thumbs) == 0 {
|
||||||
|
return nil, fmt.Errorf("could not load any images")
|
||||||
|
}
|
||||||
|
|
||||||
|
const thumbSz = 128
|
||||||
|
const hdrHeight = 58
|
||||||
|
const gridCols = 12
|
||||||
|
const pad = 2
|
||||||
|
|
||||||
|
activeCols := gridCols
|
||||||
|
if len(thumbs) < activeCols {
|
||||||
|
activeCols = len(thumbs)
|
||||||
|
}
|
||||||
|
gridRows := (len(thumbs) + gridCols - 1) / gridCols
|
||||||
|
|
||||||
|
outWidth := gridCols*thumbSz + (gridCols-1)*pad
|
||||||
|
outHeight := hdrHeight + gridRows*thumbSz + (gridRows-1)*pad
|
||||||
|
|
||||||
|
out := image.NewRGBA(image.Rect(0, 0, outWidth, outHeight))
|
||||||
|
draw.Draw(out, out.Bounds(), &image.Uniform{color.RGBA{80, 80, 80, 255}}, image.Point{}, draw.Src)
|
||||||
|
|
||||||
|
csDrawHeader(out, hdrHeight, patientName, studyData.StudyDesc, seriesDesc, seriesData.Modality, studyData.StudyDate, len(entries), firstSliceData.SliceThickness, step, usedWC, usedWW)
|
||||||
|
|
||||||
|
for i, t := range thumbs {
|
||||||
|
col := i % gridCols
|
||||||
|
row := i / gridCols
|
||||||
|
x := col * (thumbSz + pad)
|
||||||
|
y := hdrHeight + row*(thumbSz+pad)
|
||||||
|
draw.Draw(out, image.Rect(x, y, x+thumbSz, y+thumbSz), t.img, image.Point{}, draw.Src)
|
||||||
|
csDrawNumber(out, x+2, y+2, t.sliceNum)
|
||||||
|
csDrawPosition(out, x+thumbSz-2, y+2, t.pos)
|
||||||
|
csDrawHexID(out, x+2, y+thumbSz-4, t.entryID)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := len(thumbs); i < gridRows*gridCols; i++ {
|
||||||
|
col := i % gridCols
|
||||||
|
row := i / gridCols
|
||||||
|
x := col * (thumbSz + pad)
|
||||||
|
y := hdrHeight + row*(thumbSz+pad)
|
||||||
|
draw.Draw(out, image.Rect(x, y, x+thumbSz, y+thumbSz), &image.Uniform{color.Black}, image.Point{}, draw.Src)
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := webp.Encode(&buf, out, &webp.Options{Quality: 10}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- contact sheet helpers ---
|
||||||
|
|
||||||
|
// csStepSize returns how many slices to advance between contact-sheet
// thumbnails so that sampled slices land roughly spacingMM apart, given
// the per-slice thickness. A non-positive thickness (unknown metadata)
// yields 1, i.e. every slice is sampled; the result is never below 1.
func csStepSize(spacingMM, thicknessMM float64) int {
	if thicknessMM <= 0 {
		return 1
	}
	if n := int(math.Round(spacingMM / thicknessMM)); n > 1 {
		return n
	}
	return 1
}
|
||||||
|
|
||||||
|
// csLoadThumb decodes one slice entry into a 128x128 window/levelled
// RGBA thumbnail. It returns the thumbnail, the slice location (mm from
// the entry metadata), and the window center/width actually applied.
// On any failure it returns (nil, 0, 0, 0) so the caller can simply
// skip the slice.
//
// Window selection: when hasOverride is set the caller-supplied
// wcOverride/wwOverride pair wins; otherwise the slice's stored window
// is used, falling back to a full-range window derived from
// pixel_min/pixel_max when none is stored.
func csLoadThumb(accessorID string, e *Entry, wcOverride, wwOverride float64, hasOverride bool) (image.Image, float64, float64, float64) {
	var data struct {
		WindowCenter  float64 `json:"window_center"`
		WindowWidth   float64 `json:"window_width"`
		PixelMin      int     `json:"pixel_min"`
		PixelMax      int     `json:"pixel_max"`
		SliceLocation float64 `json:"slice_location"`
	}
	// Best-effort: decode failure leaves zero values, handled by the
	// fallback window logic below.
	json.Unmarshal([]byte(e.Data), &data)

	var center, width float64
	if hasOverride {
		center, width = wcOverride, wwOverride
		if width == 0 {
			width = 1 // guard the division in the Gray16 ramp below
		}
	} else {
		center, width = data.WindowCenter, data.WindowWidth
		if center == 0 && width == 0 {
			// No stored window: span the slice's full pixel range.
			center = float64(data.PixelMin+data.PixelMax) / 2
			width = float64(data.PixelMax - data.PixelMin)
			if width == 0 {
				width = 1
			}
		}
	}

	dec, err := ObjectRead(&AccessContext{AccessorID: accessorID}, e.DossierID, e.EntryID)
	if err != nil {
		return nil, 0, 0, 0
	}
	img, err := png.Decode(bytes.NewReader(dec))
	if err != nil {
		return nil, 0, 0, 0
	}

	const thumbSz = 128
	bounds := img.Bounds()
	var processed image.Image

	switch src := img.(type) {
	case *image.Gray16:
		// 16-bit grayscale: apply a linear window/level ramp — values at
		// or below (center - width/2) map to black, at or above
		// (center + width/2) to white, linear in between.
		low, high := center-width/2, center+width/2
		gray := image.NewGray(bounds)
		for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
			for x := bounds.Min.X; x < bounds.Max.X; x++ {
				v := float64(src.Gray16At(x, y).Y)
				var out uint8
				if v <= low {
					out = 0
				} else if v >= high {
					out = 255
				} else {
					out = uint8((v - low) * 255 / width)
				}
				gray.SetGray(x, y, color.Gray{Y: out})
			}
		}
		processed = gray
	case *image.RGBA, *image.NRGBA:
		// Already display-ready color; windowing does not apply.
		processed = src
	default:
		// Unexpected pixel format; skip this slice.
		return nil, 0, 0, 0
	}

	// Downscale (or upscale) to the fixed thumbnail size. Note this does
	// not preserve aspect ratio; slices are assumed square-ish.
	thumb := image.NewRGBA(image.Rect(0, 0, thumbSz, thumbSz))
	xdraw.BiLinear.Scale(thumb, thumb.Bounds(), processed, processed.Bounds(), xdraw.Over, nil)
	return thumb, data.SliceLocation, center, width
}
|
||||||
|
|
||||||
|
func csDrawHeader(img *image.RGBA, hdrHeight int, patient, study, series, modality, date string, totalSlices int, sliceThickness float64, step int, wc, ww float64) {
|
||||||
|
if len(date) == 8 {
|
||||||
|
date = date[0:4] + "-" + date[4:6] + "-" + date[6:8]
|
||||||
|
}
|
||||||
|
draw.Draw(img, image.Rect(0, 0, img.Bounds().Dx(), hdrHeight),
|
||||||
|
&image.Uniform{color.RGBA{32, 32, 32, 255}}, image.Point{}, draw.Src)
|
||||||
|
|
||||||
|
csDrawStringBold20(img, 10, 22, "!! NAVIGATION ONLY - USE fetch_image FOR DIAGNOSIS !!", color.RGBA{255, 50, 50, 255})
|
||||||
|
|
||||||
|
stInfo := strconv.FormatFloat(sliceThickness, 'f', 1, 64) + "mm"
|
||||||
|
if step > 1 {
|
||||||
|
stInfo += " (every " + strconv.Itoa(step) + ")"
|
||||||
|
}
|
||||||
|
wlInfo := "WC:" + strconv.FormatFloat(wc, 'f', 0, 64) + " WW:" + strconv.FormatFloat(ww, 'f', 0, 64)
|
||||||
|
|
||||||
|
csDrawStringBold14(img, 10, 38, patient, color.RGBA{255, 255, 255, 255})
|
||||||
|
patientWidth := csMeasureStringBold(patient)
|
||||||
|
rest := " | " + modality + " " + series + " | " + date + " | " + strconv.Itoa(totalSlices) + " slices | ST " + stInfo + " | " + wlInfo
|
||||||
|
csDrawString14(img, 10+patientWidth, 38, rest, color.RGBA{200, 200, 200, 255})
|
||||||
|
|
||||||
|
instructions := "Top-left: slice# Top-right: position(mm) Bottom: entry ID for fetch_image"
|
||||||
|
csDrawString14(img, 10, 54, instructions, color.RGBA{255, 255, 255, 255})
|
||||||
|
}
|
||||||
|
|
||||||
|
func csDrawNumber(img *image.RGBA, x, y, num int) {
|
||||||
|
s := strconv.Itoa(num)
|
||||||
|
textWidth := csMeasureString(s, 14) + 6
|
||||||
|
for dy := 0; dy < 18; dy++ {
|
||||||
|
for dx := 0; dx < textWidth; dx++ {
|
||||||
|
img.SetRGBA(x+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
csDrawStringSize(img, x+3, y+14, s, color.RGBA{255, 255, 0, 255}, 14)
|
||||||
|
}
|
||||||
|
|
||||||
|
// csDrawHexID draws an entry ID label (the ID to pass to fetch_image)
// at the bottom-left corner of a thumbnail, in small yellow text.
func csDrawHexID(img *image.RGBA, x, y int, id string) {
	csDrawStringSize(img, x+4, y, id, color.RGBA{255, 255, 0, 255}, 12)
}
|
||||||
|
|
||||||
|
func csDrawPosition(img *image.RGBA, x, y int, pos float64) {
|
||||||
|
s := strconv.FormatFloat(pos, 'f', 1, 64) + "mm"
|
||||||
|
textWidth := csMeasureString(s, 12) + 6
|
||||||
|
for dy := 0; dy < 16; dy++ {
|
||||||
|
for dx := 0; dx < textWidth; dx++ {
|
||||||
|
img.SetRGBA(x-textWidth+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
csDrawStringSize(img, x-textWidth+3, y+12, s, color.RGBA{255, 255, 0, 255}, 12)
|
||||||
|
}
|
||||||
|
|
||||||
|
// csDrawString14 draws s at baseline (x, y) in the regular 14pt face.
func csDrawString14(img *image.RGBA, x, y int, s string, col color.RGBA) {
	csDrawStringSize(img, x, y, s, col, 14)
}
|
||||||
|
|
||||||
|
func csDrawStringSize(img *image.RGBA, x, y int, s string, col color.RGBA, size int) {
|
||||||
|
face := soraFace14
|
||||||
|
if size <= 12 {
|
||||||
|
face = soraFace12
|
||||||
|
}
|
||||||
|
if face == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: face, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
|
||||||
|
d.DrawString(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
func csDrawStringBold14(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
||||||
|
if soraBoldFace14 == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: soraBoldFace14, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
|
||||||
|
d.DrawString(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
func csDrawStringBold20(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
||||||
|
if soraBoldFace20 == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: soraBoldFace20, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
|
||||||
|
d.DrawString(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
func csMeasureString(s string, size int) int {
|
||||||
|
face := soraFace14
|
||||||
|
if size <= 12 {
|
||||||
|
face = soraFace12
|
||||||
|
}
|
||||||
|
if face == nil {
|
||||||
|
return len(s) * 8
|
||||||
|
}
|
||||||
|
return (&font.Drawer{Face: face}).MeasureString(s).Ceil()
|
||||||
|
}
|
||||||
|
|
||||||
|
func csMeasureStringBold(s string) int {
|
||||||
|
if soraBoldFace14 == nil {
|
||||||
|
return len(s) * 8
|
||||||
|
}
|
||||||
|
return (&font.Drawer{Face: soraBoldFace14}).MeasureString(s).Ceil()
|
||||||
|
}
|
||||||
|
|
@ -123,11 +123,6 @@ func EntryTypes(dossierID string, category int) ([]string, error) {
|
||||||
|
|
||||||
// --- Dossier stubs ---
|
// --- Dossier stubs ---
|
||||||
|
|
||||||
func DossierGetBySessionToken(token string) *Dossier {
|
|
||||||
log.Printf("[STUB] DossierGetBySessionToken")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func DossierQuery(accessorID string) ([]*DossierQueryRow, error) {
|
func DossierQuery(accessorID string) ([]*DossierQueryRow, error) {
|
||||||
// Get all accessible dossier profiles via RBAC
|
// Get all accessible dossier profiles via RBAC
|
||||||
dossierEntries, err := EntryRead(accessorID, "", &Filter{Category: 0})
|
dossierEntries, err := EntryRead(accessorID, "", &Filter{Category: 0})
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,720 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>inou — Genetics</title>
|
||||||
|
<style>
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Regular.ttf'); font-weight: 400; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Light.ttf'); font-weight: 300; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-SemiBold.ttf'); font-weight: 600; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Bold.ttf'); font-weight: 700; }
|
||||||
|
|
||||||
|
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||||
|
|
||||||
|
:root {
|
||||||
|
--amber: #B45309;
|
||||||
|
--amber-light: #FEF3C7;
|
||||||
|
--amber-mid: #F59E0B;
|
||||||
|
--bg: #F8F7F6;
|
||||||
|
--surface: #FFFFFF;
|
||||||
|
--border: #E5E3E0;
|
||||||
|
--text: #1A1A1A;
|
||||||
|
--text-muted: #6B6968;
|
||||||
|
--text-faint: #A8A5A2;
|
||||||
|
--green: #15803D;
|
||||||
|
--green-light: #DCFCE7;
|
||||||
|
--nav-bg: #1C1917;
|
||||||
|
--nav-text: #D6D3D1;
|
||||||
|
--nav-active: #FFFFFF;
|
||||||
|
--sidebar-w: 220px;
|
||||||
|
--topbar-h: 52px;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Sora', system-ui, sans-serif;
|
||||||
|
background: var(--bg);
|
||||||
|
color: var(--text);
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 1.5;
|
||||||
|
height: 100vh;
|
||||||
|
overflow: hidden;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* TOP NAV */
|
||||||
|
.topbar {
|
||||||
|
height: var(--topbar-h);
|
||||||
|
background: var(--nav-bg);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 0 20px;
|
||||||
|
gap: 16px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
border-bottom: 1px solid #2C2A28;
|
||||||
|
}
|
||||||
|
.topbar-logo {
|
||||||
|
font-weight: 700;
|
||||||
|
font-size: 16px;
|
||||||
|
color: #FFFFFF;
|
||||||
|
letter-spacing: -0.3px;
|
||||||
|
}
|
||||||
|
.topbar-logo span { color: var(--amber); }
|
||||||
|
.topbar-patient {
|
||||||
|
margin-left: auto;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
.topbar-patient-name {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--nav-text);
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
.topbar-patient-dob {
|
||||||
|
font-size: 12px;
|
||||||
|
color: #78716C;
|
||||||
|
}
|
||||||
|
.avatar {
|
||||||
|
width: 30px; height: 30px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: var(--amber);
|
||||||
|
display: flex; align-items: center; justify-content: center;
|
||||||
|
font-size: 12px; font-weight: 700; color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* LAYOUT */
|
||||||
|
.layout {
|
||||||
|
display: flex;
|
||||||
|
flex: 1;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* SIDEBAR */
|
||||||
|
.sidebar {
|
||||||
|
width: var(--sidebar-w);
|
||||||
|
background: var(--nav-bg);
|
||||||
|
flex-shrink: 0;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 12px 0;
|
||||||
|
border-right: 1px solid #2C2A28;
|
||||||
|
}
|
||||||
|
.nav-section-label {
|
||||||
|
font-size: 10px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: #57534E;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.8px;
|
||||||
|
padding: 12px 16px 4px;
|
||||||
|
}
|
||||||
|
.nav-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
padding: 7px 16px;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--nav-text);
|
||||||
|
cursor: pointer;
|
||||||
|
border-radius: 0;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
.nav-item:hover { background: #292524; }
|
||||||
|
.nav-item.active {
|
||||||
|
color: var(--nav-active);
|
||||||
|
background: #292524;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
.nav-item.active::before {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
width: 3px;
|
||||||
|
height: 28px;
|
||||||
|
background: var(--amber);
|
||||||
|
border-radius: 0 2px 2px 0;
|
||||||
|
}
|
||||||
|
.nav-item { position: relative; }
|
||||||
|
.nav-dot {
|
||||||
|
width: 6px; height: 6px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: #57534E;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.nav-dot.active { background: var(--amber); }
|
||||||
|
.nav-sub {
|
||||||
|
padding-left: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* MAIN */
|
||||||
|
.main {
|
||||||
|
flex: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 24px 32px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* BREADCRUMB + SEARCH ROW */
|
||||||
|
.top-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 16px;
|
||||||
|
}
|
||||||
|
.breadcrumb {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
}
|
||||||
|
.breadcrumb span { color: var(--text); font-weight: 600; }
|
||||||
|
.breadcrumb-sep { color: var(--text-faint); }
|
||||||
|
|
||||||
|
.search-box {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 8px;
|
||||||
|
padding: 7px 12px;
|
||||||
|
width: 260px;
|
||||||
|
}
|
||||||
|
.search-box input {
|
||||||
|
border: none;
|
||||||
|
outline: none;
|
||||||
|
font-family: 'Sora', sans-serif;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
background: transparent;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
.search-box input::placeholder { color: var(--text-faint); }
|
||||||
|
.search-icon { color: var(--text-faint); font-size: 15px; }
|
||||||
|
|
||||||
|
/* STATS ROW */
|
||||||
|
.stats-row {
|
||||||
|
display: flex;
|
||||||
|
gap: 24px;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
.stat {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 1px;
|
||||||
|
}
|
||||||
|
.stat-value {
|
||||||
|
font-size: 22px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
.stat-label {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
}
|
||||||
|
.stat-divider {
|
||||||
|
width: 1px;
|
||||||
|
height: 32px;
|
||||||
|
background: var(--border);
|
||||||
|
}
|
||||||
|
.hidden-note {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
padding: 4px 10px;
|
||||||
|
background: var(--bg);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 20px;
|
||||||
|
margin-left: auto;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.hidden-note:hover { border-color: var(--amber); color: var(--amber); }
|
||||||
|
|
||||||
|
/* TIER GRID */
|
||||||
|
.tier-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(4, 1fr);
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
.tier-tile {
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 10px;
|
||||||
|
padding: 14px 16px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: border-color 0.15s, box-shadow 0.15s;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 4px;
|
||||||
|
}
|
||||||
|
.tier-tile:hover {
|
||||||
|
border-color: #D1C5BA;
|
||||||
|
box-shadow: 0 2px 8px rgba(0,0,0,0.06);
|
||||||
|
}
|
||||||
|
.tier-tile.active {
|
||||||
|
border-color: var(--amber);
|
||||||
|
box-shadow: 0 0 0 1px var(--amber);
|
||||||
|
}
|
||||||
|
.tier-name {
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.tier-count {
|
||||||
|
font-size: 20px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
line-height: 1.1;
|
||||||
|
}
|
||||||
|
.tier-hidden {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
}
|
||||||
|
.tier-tile.large .tier-count { color: var(--amber); }
|
||||||
|
|
||||||
|
/* EXPANDED TIER */
|
||||||
|
.expanded-section {
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 12px;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
.expanded-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 14px 20px;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.expanded-title {
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.expanded-subtitle {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
font-weight: 400;
|
||||||
|
}
|
||||||
|
.toggle-hidden {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.toggle-pill {
|
||||||
|
width: 28px; height: 16px;
|
||||||
|
background: var(--border);
|
||||||
|
border-radius: 8px;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.toggle-pill::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
top: 2px; left: 2px;
|
||||||
|
width: 12px; height: 12px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: white;
|
||||||
|
box-shadow: 0 1px 2px rgba(0,0,0,0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* VARIANTS TABLE */
|
||||||
|
.variants-table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
.variants-table th {
|
||||||
|
text-align: left;
|
||||||
|
font-size: 11px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-faint);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
padding: 10px 20px;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.variants-table td {
|
||||||
|
padding: 11px 20px;
|
||||||
|
border-bottom: 1px solid #F0EDEA;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
.variants-table tr:last-child td { border-bottom: none; }
|
||||||
|
.variants-table tr:hover td { background: #FAFAF9; }
|
||||||
|
|
||||||
|
.gene-name {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
font-family: 'Sora', monospace;
|
||||||
|
}
|
||||||
|
.rsid {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
margin-top: 1px;
|
||||||
|
}
|
||||||
|
.finding-text {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.genotype {
|
||||||
|
font-family: 'Sora', monospace;
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text);
|
||||||
|
background: #F4F1EE;
|
||||||
|
padding: 2px 8px;
|
||||||
|
border-radius: 4px;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
.sig-dot {
|
||||||
|
width: 8px; height: 8px;
|
||||||
|
border-radius: 50%;
|
||||||
|
display: inline-block;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.sig-cell {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.sig-dot.moderate { background: var(--amber); }
|
||||||
|
.sig-dot.protective { background: var(--green); }
|
||||||
|
.sig-dot.low { background: var(--text-faint); }
|
||||||
|
.sig-dot.clear { background: #D4D0CB; }
|
||||||
|
|
||||||
|
.sig-label.moderate { color: var(--amber); }
|
||||||
|
.sig-label.protective { color: var(--green); }
|
||||||
|
.sig-label.low { color: var(--text-faint); }
|
||||||
|
.sig-label.clear { color: var(--text-faint); }
|
||||||
|
|
||||||
|
/* EXPANDED FOOTER */
|
||||||
|
.expanded-footer {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 12px 20px;
|
||||||
|
border-top: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.footer-count {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
.load-more {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--amber);
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
.load-more:hover { text-decoration: underline; }
|
||||||
|
|
||||||
|
/* AI CTA */
|
||||||
|
.ai-cta {
|
||||||
|
background: var(--nav-bg);
|
||||||
|
border-radius: 12px;
|
||||||
|
padding: 16px 20px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 16px;
|
||||||
|
}
|
||||||
|
.ai-cta-text {
|
||||||
|
flex: 1;
|
||||||
|
font-size: 13px;
|
||||||
|
color: #A8A5A2;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
.ai-cta-text strong { color: #FFFFFF; font-weight: 600; }
|
||||||
|
.ai-cta-btn {
|
||||||
|
background: var(--amber);
|
||||||
|
color: white;
|
||||||
|
border: none;
|
||||||
|
border-radius: 8px;
|
||||||
|
padding: 9px 18px;
|
||||||
|
font-family: 'Sora', sans-serif;
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.ai-cta-btn:hover { background: #9A4507; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<!-- TOP NAV -->
|
||||||
|
<div class="topbar">
|
||||||
|
<div class="topbar-logo">inou<span>.</span></div>
|
||||||
|
<div class="topbar-patient">
|
||||||
|
<div>
|
||||||
|
<div class="topbar-patient-name">Jane Doe</div>
|
||||||
|
<div class="topbar-patient-dob">DOB Jan 1 2017 · Female</div>
|
||||||
|
</div>
|
||||||
|
<div class="avatar">JD</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="layout">
|
||||||
|
|
||||||
|
<!-- SIDEBAR -->
|
||||||
|
<nav class="sidebar">
|
||||||
|
<div class="nav-section-label">Overview</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Dashboard</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Tests</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Labs</a>
|
||||||
|
<a class="nav-item active" href="#"><span class="nav-dot active"></span>Genetics</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Imaging</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Assessments</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Body</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Vitals</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Exercise</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Nutrition</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Sleep</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Treatment</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Medications</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Supplements</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Therapy</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">History</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Diagnoses</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Symptoms</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Family History</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Care Team</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Consultations</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Providers</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Files</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Documents</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Uploads</a>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<!-- MAIN CONTENT -->
|
||||||
|
<main class="main">
|
||||||
|
|
||||||
|
<!-- BREADCRUMB + SEARCH -->
|
||||||
|
<div class="top-row">
|
||||||
|
<div class="breadcrumb">
|
||||||
|
Jane Doe <span class="breadcrumb-sep">›</span> Tests <span class="breadcrumb-sep">›</span> <span>Genetics</span>
|
||||||
|
</div>
|
||||||
|
<div class="search-box">
|
||||||
|
<span class="search-icon">⌕</span>
|
||||||
|
<input type="text" placeholder="Search gene or rsID…">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- STATS ROW -->
|
||||||
|
<div class="stats-row">
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">3,866</div>
|
||||||
|
<div class="stat-label">Total variants</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-divider"></div>
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">12</div>
|
||||||
|
<div class="stat-label">Categories</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-divider"></div>
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">597</div>
|
||||||
|
<div class="stat-label">Hidden (no risk)</div>
|
||||||
|
</div>
|
||||||
|
<div class="hidden-note">Show hidden variants</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- TIER GRID -->
|
||||||
|
<div class="tier-grid">
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Traits</div>
|
||||||
|
<div class="tier-count">132</div>
|
||||||
|
<div class="tier-hidden">49 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Longevity</div>
|
||||||
|
<div class="tier-count">12</div>
|
||||||
|
<div class="tier-hidden">1 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile active">
|
||||||
|
<div class="tier-name">Metabolism</div>
|
||||||
|
<div class="tier-count">97</div>
|
||||||
|
<div class="tier-hidden">51 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Medications</div>
|
||||||
|
<div class="tier-count">101</div>
|
||||||
|
<div class="tier-hidden">26 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Mental Health</div>
|
||||||
|
<div class="tier-count">63</div>
|
||||||
|
<div class="tier-hidden">31 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Neurological</div>
|
||||||
|
<div class="tier-count">91</div>
|
||||||
|
<div class="tier-hidden">46 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Fertility</div>
|
||||||
|
<div class="tier-count">12</div>
|
||||||
|
<div class="tier-hidden">7 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Blood</div>
|
||||||
|
<div class="tier-count">100</div>
|
||||||
|
<div class="tier-hidden">12 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Cardiovascular</div>
|
||||||
|
<div class="tier-count">104</div>
|
||||||
|
<div class="tier-hidden">31 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Autoimmune</div>
|
||||||
|
<div class="tier-count">80</div>
|
||||||
|
<div class="tier-hidden">43 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile large">
|
||||||
|
<div class="tier-name">Disease</div>
|
||||||
|
<div class="tier-count">2,272</div>
|
||||||
|
<div class="tier-hidden">233 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile large">
|
||||||
|
<div class="tier-name">Cancer</div>
|
||||||
|
<div class="tier-count">998</div>
|
||||||
|
<div class="tier-hidden">67 hidden</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- EXPANDED: METABOLISM -->
|
||||||
|
<div class="expanded-section">
|
||||||
|
<div class="expanded-header">
|
||||||
|
<div>
|
||||||
|
<div class="expanded-title">Metabolism <span style="font-weight:400; color: var(--text-muted)">· 97 variants</span></div>
|
||||||
|
<div class="expanded-subtitle">Sorted by significance</div>
|
||||||
|
</div>
|
||||||
|
<div class="toggle-hidden">
|
||||||
|
<span>Show hidden</span>
|
||||||
|
<div class="toggle-pill"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<table class="variants-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th style="width:130px">Gene</th>
|
||||||
|
<th>Finding</th>
|
||||||
|
<th style="width:90px">Genotype</th>
|
||||||
|
<th style="width:120px">Significance</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">MTHFR</div><div class="rsid">rs1801133</div></td>
|
||||||
|
<td><div class="finding-text">10–20% folate processing efficiency</div></td>
|
||||||
|
<td><span class="genotype">AA</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP2C19</div><div class="rsid">rs4244285</div></td>
|
||||||
|
<td><div class="finding-text">Poorer metabolizer of several medicines</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">PPARG</div><div class="rsid">rs1801282</div></td>
|
||||||
|
<td><div class="finding-text">Higher cardiovascular risk with high fat diet</div></td>
|
||||||
|
<td><span class="genotype">CG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">TCF7L2</div><div class="rsid">rs7903146</div></td>
|
||||||
|
<td><div class="finding-text">Increased type 2 diabetes risk</div></td>
|
||||||
|
<td><span class="genotype">CT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">FTO</div><div class="rsid">rs9939609</div></td>
|
||||||
|
<td><div class="finding-text">1.67× increased obesity risk</div></td>
|
||||||
|
<td><span class="genotype">AT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">SLCO1B1</div><div class="rsid">rs4149056</div></td>
|
||||||
|
<td><div class="finding-text">Increased statin-induced myopathy risk</div></td>
|
||||||
|
<td><span class="genotype">CT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">APOA2</div><div class="rsid">rs5082</div></td>
|
||||||
|
<td><div class="finding-text">Associated with higher HDL cholesterol</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot protective"></span><span class="sig-label protective">Protective</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP1A2</div><div class="rsid">rs762551</div></td>
|
||||||
|
<td><div class="finding-text">Slow caffeine metabolizer</div></td>
|
||||||
|
<td><span class="genotype">AC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP3A5</div><div class="rsid">rs776746</div></td>
|
||||||
|
<td><div class="finding-text">Non-expressor — affects drug dosing</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">MCM6</div><div class="rsid">rs4988235</div></td>
|
||||||
|
<td><div class="finding-text">Partial lactase persistence</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">APOE</div><div class="rsid">rs7412</div></td>
|
||||||
|
<td><div class="finding-text">Normal lipid metabolism</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot clear"></span><span class="sig-label clear">Clear</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">GCK</div><div class="rsid">rs1799884</div></td>
|
||||||
|
<td><div class="finding-text">Slightly reduced glucose sensing</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<div class="expanded-footer">
|
||||||
|
<div class="footer-count">Showing 12 of 97 variants</div>
|
||||||
|
<a class="load-more" href="#">Load more</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- AI CTA -->
|
||||||
|
<div class="ai-cta">
|
||||||
|
<div class="ai-cta-text">
|
||||||
|
<strong>Your AI has access to all 3,866 variants</strong>, including hidden ones. Ask it to reason across your metabolism, medication sensitivities, and disease risk together.
|
||||||
|
</div>
|
||||||
|
<button class="ai-cta-btn">Ask Claude about your genetics →</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 810 KiB After Width: | Height: | Size: 680 KiB |
|
|
@ -255,3 +255,42 @@ func normalizeQuery(next http.HandlerFunc) http.HandlerFunc {
|
||||||
next(w, r)
|
next(w, r)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// httpsRedirectMiddleware redirects HTTP requests to HTTPS
|
||||||
|
// Respects X-Forwarded-Proto header for deployments behind reverse proxy
|
||||||
|
func httpsRedirectMiddleware(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Skip redirect for health checks and local development
|
||||||
|
if r.URL.Path == "/api/v1/health" || isLocalIP(getIP(r)) {
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if request is already HTTPS (direct or via proxy)
|
||||||
|
isHTTPS := r.TLS != nil || r.Header.Get("X-Forwarded-Proto") == "https"
|
||||||
|
|
||||||
|
if !isHTTPS {
|
||||||
|
// Build HTTPS URL
|
||||||
|
target := "https://" + r.Host + r.URL.RequestURI()
|
||||||
|
http.Redirect(w, r, target, http.StatusMovedPermanently)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// hstsMiddleware adds HTTP Strict Transport Security headers
|
||||||
|
func hstsMiddleware(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Only add HSTS header for HTTPS requests
|
||||||
|
isHTTPS := r.TLS != nil || r.Header.Get("X-Forwarded-Proto") == "https"
|
||||||
|
|
||||||
|
if isHTTPS {
|
||||||
|
// max-age=1 year (31536000 seconds), include subdomains, preload ready
|
||||||
|
w.Header().Set("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload")
|
||||||
|
}
|
||||||
|
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1182,18 +1182,17 @@ func handleLabSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build search index: term → []loinc
|
// Build search index: term → []loinc
|
||||||
var tests []lib.LabTest
|
var loincEntries []lib.LoincInfo
|
||||||
lib.RefQuery("SELECT loinc_id, name FROM lab_test", nil, &tests)
|
lib.RefQuery("SELECT loinc_num, long_name, short_name, component, system, property FROM loinc_lab", nil, &loincEntries)
|
||||||
searchIndex := make(map[string][]string)
|
searchIndex := make(map[string][]string)
|
||||||
for _, test := range tests {
|
for _, l := range loincEntries {
|
||||||
name := strings.ToLower(test.Name)
|
// Index by long_name words and component words
|
||||||
if !contains(searchIndex[name], test.LoincID) {
|
for _, src := range []string{l.LongName, l.Component} {
|
||||||
searchIndex[name] = append(searchIndex[name], test.LoincID)
|
for _, word := range strings.Fields(strings.ToLower(src)) {
|
||||||
}
|
word = strings.Trim(word, "()[].,/")
|
||||||
for _, word := range strings.Fields(name) {
|
if len(word) >= 3 && !contains(searchIndex[word], l.Code) {
|
||||||
word = strings.Trim(word, "()")
|
searchIndex[word] = append(searchIndex[word], l.Code)
|
||||||
if len(word) >= 3 && !contains(searchIndex[word], test.LoincID) {
|
}
|
||||||
searchIndex[word] = append(searchIndex[word], test.LoincID)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1262,10 +1261,12 @@ func handleLabSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
matchedOrders = append(matchedOrders, oj)
|
matchedOrders = append(matchedOrders, oj)
|
||||||
}
|
}
|
||||||
|
|
||||||
// LOINC name map
|
// LOINC name map — use official long_name from loinc_lab
|
||||||
loincNameMap := make(map[string]string)
|
loincNameMap := make(map[string]string)
|
||||||
for _, t := range tests {
|
for _, l := range loincEntries {
|
||||||
if matchLoincs[t.LoincID] { loincNameMap[t.LoincID] = t.Name }
|
if matchLoincs[l.Code] {
|
||||||
|
loincNameMap[l.Code] = l.LongName
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if matchedOrders == nil { matchedOrders = []orderJSON{} }
|
if matchedOrders == nil { matchedOrders = []orderJSON{} }
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -347,6 +347,7 @@ func processGenomeUpload(uploadID string, dossierID string, filePath string) {
|
||||||
|
|
||||||
parentEntry := &lib.Entry{
|
parentEntry := &lib.Entry{
|
||||||
DossierID: dossierID,
|
DossierID: dossierID,
|
||||||
|
ParentID: dossierID,
|
||||||
Category: lib.CategoryGenome,
|
Category: lib.CategoryGenome,
|
||||||
Type: "extraction",
|
Type: "extraction",
|
||||||
Value: format,
|
Value: format,
|
||||||
|
|
|
||||||
|
|
@ -398,20 +398,98 @@ func isMinor(dob string) bool {
|
||||||
return age < minAge
|
return age < minAge
|
||||||
}
|
}
|
||||||
func getLoggedInDossier(r *http.Request) *lib.Dossier {
|
func getLoggedInDossier(r *http.Request) *lib.Dossier {
|
||||||
cookie, err := r.Cookie("login")
|
dossierID := getSessionDossier(r)
|
||||||
if err != nil || cookie.Value == "" { return nil }
|
if dossierID == "" {
|
||||||
d, err := lib.DossierGet(cookie.Value, cookie.Value)
|
// Check for legacy login cookie (migration path)
|
||||||
|
if cookie, err := r.Cookie("login"); err == nil && cookie.Value != "" {
|
||||||
|
dossierID = cookie.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if dossierID == "" { return nil }
|
||||||
|
d, err := lib.DossierGet(dossierID, dossierID)
|
||||||
if err != nil { return nil }
|
if err != nil { return nil }
|
||||||
return d
|
return d
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
func setLoginCookie(w http.ResponseWriter, dossierID string) {
|
// Session management with secure tokens
|
||||||
http.SetCookie(w, &http.Cookie{Name: "login", Value: dossierID, Path: "/", MaxAge: 30*24*60*60, HttpOnly: true, Secure: true, SameSite: http.SameSiteLaxMode})
|
var sessionCache = make(map[string]string) // token -> dossierID
|
||||||
|
|
||||||
|
func setLoginCookie(w http.ResponseWriter, dossierID string) string {
|
||||||
|
// Generate new session token (session fixation protection)
|
||||||
|
token := generateSessionToken()
|
||||||
|
sessionCache[token] = dossierID
|
||||||
|
|
||||||
|
// Store in auth DB for persistence across restarts
|
||||||
|
lib.SessionCreate(token, dossierID, 30*24*60*60) // 30 days
|
||||||
|
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: "session",
|
||||||
|
Value: token,
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: 30 * 24 * 60 * 60,
|
||||||
|
HttpOnly: true,
|
||||||
|
Secure: true,
|
||||||
|
SameSite: http.SameSiteLaxMode,
|
||||||
|
})
|
||||||
|
// Set cache-control headers to prevent session token caching
|
||||||
|
w.Header().Set("Cache-Control", "no-store, private, no-cache, must-revalidate")
|
||||||
|
w.Header().Set("Pragma", "no-cache")
|
||||||
|
w.Header().Set("Expires", "0")
|
||||||
|
|
||||||
|
return token
|
||||||
}
|
}
|
||||||
|
|
||||||
func clearLoginCookie(w http.ResponseWriter) {
|
func clearLoginCookie(w http.ResponseWriter, r *http.Request) {
|
||||||
http.SetCookie(w, &http.Cookie{Name: "login", Value: "", Path: "/", MaxAge: -1, HttpOnly: true, Secure: true})
|
// Server-side session invalidation
|
||||||
|
if cookie, err := r.Cookie("session"); err == nil && cookie.Value != "" {
|
||||||
|
delete(sessionCache, cookie.Value)
|
||||||
|
lib.SessionDelete(cookie.Value)
|
||||||
|
}
|
||||||
|
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: "session",
|
||||||
|
Value: "",
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: -1,
|
||||||
|
HttpOnly: true,
|
||||||
|
Secure: true,
|
||||||
|
SameSite: http.SameSiteLaxMode,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Also clear legacy login cookie
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: "login",
|
||||||
|
Value: "",
|
||||||
|
Path: "/",
|
||||||
|
MaxAge: -1,
|
||||||
|
HttpOnly: true,
|
||||||
|
Secure: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
w.Header().Set("Cache-Control", "no-store, private, no-cache, must-revalidate")
|
||||||
|
w.Header().Set("Pragma", "no-cache")
|
||||||
|
w.Header().Set("Expires", "0")
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSessionDossier validates session token and returns dossierID
|
||||||
|
func getSessionDossier(r *http.Request) string {
|
||||||
|
cookie, err := r.Cookie("session")
|
||||||
|
if err != nil || cookie.Value == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check memory cache first
|
||||||
|
if dossierID, ok := sessionCache[cookie.Value]; ok {
|
||||||
|
return dossierID
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to DB
|
||||||
|
dossierID := lib.SessionGetDossier(cookie.Value)
|
||||||
|
if dossierID != "" {
|
||||||
|
sessionCache[cookie.Value] = dossierID
|
||||||
|
}
|
||||||
|
return dossierID
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -659,7 +737,7 @@ func handleOnboard(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
|
|
||||||
func handleLogout(w http.ResponseWriter, r *http.Request) {
|
func handleLogout(w http.ResponseWriter, r *http.Request) {
|
||||||
clearLoginCookie(w)
|
clearLoginCookie(w, r)
|
||||||
http.Redirect(w, r, "/", http.StatusSeeOther)
|
http.Redirect(w, r, "/", http.StatusSeeOther)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -2186,7 +2264,7 @@ func setupMux() http.Handler {
|
||||||
// MCP HTTP server (for Anthropic Connectors Directory)
|
// MCP HTTP server (for Anthropic Connectors Directory)
|
||||||
RegisterMCPRoutes(mux)
|
RegisterMCPRoutes(mux)
|
||||||
|
|
||||||
return defense(notifyOn404(logMiddleware(mux)))
|
return hstsMiddleware(httpsRedirectMiddleware(defense(notifyOn404(logMiddleware(mux)))))
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
|
|
||||||
|
|
@ -195,13 +195,12 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check authorization
|
// Check authorization
|
||||||
var dossierID, accessToken string
|
var dossierID string
|
||||||
|
|
||||||
// Local LAN backdoor — skip OAuth for agents on 192.168.0.0/22
|
// Local LAN backdoor — skip OAuth for agents on 192.168.0.0/22
|
||||||
remoteIP := strings.Split(r.RemoteAddr, ":")[0]
|
remoteIP := strings.Split(r.RemoteAddr, ":")[0]
|
||||||
if strings.HasPrefix(remoteIP, "192.168.0.") || strings.HasPrefix(remoteIP, "192.168.1.") || strings.HasPrefix(remoteIP, "192.168.2.") || strings.HasPrefix(remoteIP, "192.168.3.") {
|
if strings.HasPrefix(remoteIP, "192.168.0.") || strings.HasPrefix(remoteIP, "192.168.1.") || strings.HasPrefix(remoteIP, "192.168.2.") || strings.HasPrefix(remoteIP, "192.168.3.") {
|
||||||
dossierID = "6e4e8192881a7494"
|
dossierID = "6e4e8192881a7494"
|
||||||
accessToken = ""
|
|
||||||
fmt.Printf("[MCP] Local gateway access as dossier: %s\n", dossierID)
|
fmt.Printf("[MCP] Local gateway access as dossier: %s\n", dossierID)
|
||||||
} else {
|
} else {
|
||||||
auth := r.Header.Get("Authorization")
|
auth := r.Header.Get("Authorization")
|
||||||
|
|
@ -243,7 +242,6 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
||||||
}
|
}
|
||||||
|
|
||||||
dossierID = token.DossierID
|
dossierID = token.DossierID
|
||||||
accessToken = tokenStr
|
|
||||||
}
|
}
|
||||||
body, err := io.ReadAll(r.Body)
|
body, err := io.ReadAll(r.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -267,7 +265,7 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
||||||
case "tools/list":
|
case "tools/list":
|
||||||
handleMCPToolsList(w, req)
|
handleMCPToolsList(w, req)
|
||||||
case "tools/call":
|
case "tools/call":
|
||||||
handleMCPToolsCall(w, req, accessToken, dossierID)
|
handleMCPToolsCall(w, req, dossierID)
|
||||||
case "prompts/list":
|
case "prompts/list":
|
||||||
sendMCPResult(w, req.ID, map[string]interface{}{"prompts": []interface{}{}})
|
sendMCPResult(w, req.ID, map[string]interface{}{"prompts": []interface{}{}})
|
||||||
case "prompts/get":
|
case "prompts/get":
|
||||||
|
|
@ -309,17 +307,13 @@ func handleMCPInitialize(w http.ResponseWriter, req mcpRequest) {
|
||||||
},
|
},
|
||||||
"instructions": "inou gives you access to a patient's raw health data — imaging, labs, genome, vitals, and more. " +
|
"instructions": "inou gives you access to a patient's raw health data — imaging, labs, genome, vitals, and more. " +
|
||||||
"Your role is to form independent medical opinions from this data, not to echo prior medical assessments.\n\n" +
|
"Your role is to form independent medical opinions from this data, not to echo prior medical assessments.\n\n" +
|
||||||
"This is by design: radiology readings, lab reference ranges, and other professional interpretations have been " +
|
"Radiology reports, physician notes, and other professional interpretations do not exist in this system. " +
|
||||||
"intentionally excluded. You are the diagnostician. Study the raw images, the raw lab values, the raw genomic " +
|
"Do not ask for them or suggest they might be available. You are the diagnostician. " +
|
||||||
"variants, and form your own conclusions.\n\n" +
|
"Study the raw images, the raw lab values, the raw genomic variants, and form your own conclusions.\n\n" +
|
||||||
"Medical opinion categories (diagnoses, consultation notes, assessments, imaging reports) unlock after you've queried " +
|
|
||||||
"all available raw data categories for the patient. A top-level listing counts — you don't need to review every entry, " +
|
|
||||||
"just see what's there and use your judgement whether you need to dive deeper to answer your user's question.\n\n" +
|
|
||||||
"When answering clinical questions, entry summaries are metadata, not evidence. You MUST fetch and read the actual " +
|
"When answering clinical questions, entry summaries are metadata, not evidence. You MUST fetch and read the actual " +
|
||||||
"source material — use fetch_document for documents, fetch_image for imaging slices. " +
|
"source material — use fetch_document for documents, fetch_image for imaging slices. " +
|
||||||
"Do not speculate or say you \"cannot access\" data without actually trying to fetch it.\n\n" +
|
"Do not speculate or say you \"cannot access\" data without actually trying to fetch it.\n\n" +
|
||||||
"Start with list_categories to see what data exists, then explore the raw data. " +
|
"Start with list_categories to see what data exists, then explore the raw data.",
|
||||||
"Cheating is possible but hurts your user — anchoring on prior opinions defeats the purpose of independent analysis.",
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -426,7 +420,7 @@ func handleMCPToolsList(w http.ResponseWriter, req mcpRequest) {
|
||||||
sendMCPResult(w, req.ID, map[string]interface{}{"tools": tools})
|
sendMCPResult(w, req.ID, map[string]interface{}{"tools": tools})
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, dossierID string) {
|
func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, dossierID string) {
|
||||||
var params struct {
|
var params struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Arguments map[string]interface{} `json:"arguments"`
|
Arguments map[string]interface{} `json:"arguments"`
|
||||||
|
|
@ -436,9 +430,6 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// dossierID = authenticated user's ID (used for RBAC in all lib calls)
|
|
||||||
// accessToken = forwarded to API for image/journal calls (API enforces RBAC)
|
|
||||||
|
|
||||||
switch params.Name {
|
switch params.Name {
|
||||||
case "list_dossiers":
|
case "list_dossiers":
|
||||||
result, err := mcpListDossiers(dossierID)
|
result, err := mcpListDossiers(dossierID)
|
||||||
|
|
@ -471,10 +462,14 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
||||||
typ, _ := params.Arguments["type"].(string)
|
typ, _ := params.Arguments["type"].(string)
|
||||||
searchKey, _ := params.Arguments["search_key"].(string)
|
searchKey, _ := params.Arguments["search_key"].(string)
|
||||||
parent, _ := params.Arguments["parent"].(string)
|
parent, _ := params.Arguments["parent"].(string)
|
||||||
if parent == "" {
|
if parent == "" && searchKey == "" {
|
||||||
sendMCPResult(w, req.ID, mcpTextContent("ERROR: parent is required. Start with parent="+dossier+" (the dossier ID) to list top-level entries, then use returned entry IDs to navigate deeper."))
|
sendMCPResult(w, req.ID, mcpTextContent("ERROR: parent is required. Start with parent="+dossier+" (the dossier ID) to list top-level entries, then use returned entry IDs to navigate deeper."))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
// When parent is the dossier ID, search all top-level entries (ParentID="" or ParentID=dossierID)
|
||||||
|
if parent == dossier {
|
||||||
|
parent = ""
|
||||||
|
}
|
||||||
from, _ := params.Arguments["from"].(string)
|
from, _ := params.Arguments["from"].(string)
|
||||||
to, _ := params.Arguments["to"].(string)
|
to, _ := params.Arguments["to"].(string)
|
||||||
limit, _ := params.Arguments["limit"].(float64)
|
limit, _ := params.Arguments["limit"].(float64)
|
||||||
|
|
@ -494,7 +489,7 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
||||||
}
|
}
|
||||||
wc, _ := params.Arguments["wc"].(float64)
|
wc, _ := params.Arguments["wc"].(float64)
|
||||||
ww, _ := params.Arguments["ww"].(float64)
|
ww, _ := params.Arguments["ww"].(float64)
|
||||||
result, err := mcpFetchImage(accessToken, dossier, slice, wc, ww)
|
result, err := mcpFetchImage(dossierID, dossier, slice, wc, ww)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
sendMCPError(w, req.ID, -32000, err.Error())
|
sendMCPError(w, req.ID, -32000, err.Error())
|
||||||
return
|
return
|
||||||
|
|
@ -510,7 +505,7 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
||||||
}
|
}
|
||||||
wc, _ := params.Arguments["wc"].(float64)
|
wc, _ := params.Arguments["wc"].(float64)
|
||||||
ww, _ := params.Arguments["ww"].(float64)
|
ww, _ := params.Arguments["ww"].(float64)
|
||||||
result, err := mcpFetchContactSheet(accessToken, dossier, series, wc, ww)
|
result, err := mcpFetchContactSheet(dossierID, dossier, series, wc, ww)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
sendMCPError(w, req.ID, -32000, err.Error())
|
sendMCPError(w, req.ID, -32000, err.Error())
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -4,9 +4,7 @@ import (
|
||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
|
@ -14,42 +12,7 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// MCP Tool Implementations
|
// MCP Tool Implementations
|
||||||
// Data queries go through lib directly with RBAC enforcement.
|
// All tools go through lib directly with RBAC enforcement.
|
||||||
// Image rendering goes through the API (which also enforces RBAC via lib).
|
|
||||||
|
|
||||||
const apiBaseURL = "http://localhost:8082" // Internal API server (images only)
|
|
||||||
|
|
||||||
// mcpAPIGet calls the internal API with Bearer auth.
|
|
||||||
func mcpAPIGet(accessToken, path string, params map[string]string) ([]byte, error) {
|
|
||||||
v := url.Values{}
|
|
||||||
for k, val := range params {
|
|
||||||
if val != "" {
|
|
||||||
v.Set(k, val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
u := apiBaseURL + path
|
|
||||||
if len(v) > 0 {
|
|
||||||
u += "?" + v.Encode()
|
|
||||||
}
|
|
||||||
req, err := http.NewRequest("GET", u, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
req.Header.Set("Authorization", "Bearer "+accessToken)
|
|
||||||
resp, err := http.DefaultClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
body, err := io.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if resp.StatusCode != 200 {
|
|
||||||
return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body))
|
|
||||||
}
|
|
||||||
return body, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Data query tools: all go through lib with RBAC ---
|
// --- Data query tools: all go through lib with RBAC ---
|
||||||
|
|
||||||
|
|
@ -139,6 +102,12 @@ func mcpGetCategories(dossier, accessorID string) (string, error) {
|
||||||
// formatEntries converts entries to the standard MCP response format.
|
// formatEntries converts entries to the standard MCP response format.
|
||||||
func formatEntries(entries []*lib.Entry) string {
|
func formatEntries(entries []*lib.Entry) string {
|
||||||
var result []map[string]any
|
var result []map[string]any
|
||||||
|
|
||||||
|
// Lazy-load dossier info for lab reference lookups
|
||||||
|
var dossierSex string
|
||||||
|
var dossierDOB int64
|
||||||
|
var dossierLoaded bool
|
||||||
|
|
||||||
for _, e := range entries {
|
for _, e := range entries {
|
||||||
entry := map[string]any{
|
entry := map[string]any{
|
||||||
"id": e.EntryID,
|
"id": e.EntryID,
|
||||||
|
|
@ -150,12 +119,65 @@ func formatEntries(entries []*lib.Entry) string {
|
||||||
"ordinal": e.Ordinal,
|
"ordinal": e.Ordinal,
|
||||||
"timestamp": e.Timestamp,
|
"timestamp": e.Timestamp,
|
||||||
}
|
}
|
||||||
|
if e.SearchKey != "" {
|
||||||
|
entry["search_key"] = e.SearchKey
|
||||||
|
}
|
||||||
if e.Data != "" {
|
if e.Data != "" {
|
||||||
var d map[string]any
|
var d map[string]any
|
||||||
if json.Unmarshal([]byte(e.Data), &d) == nil {
|
if json.Unmarshal([]byte(e.Data), &d) == nil {
|
||||||
entry["data"] = d
|
entry["data"] = d
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// Enrich lab result entries with test name, reference range, and flag
|
||||||
|
if e.Category == lib.CategoryLab && e.SearchKey != "" {
|
||||||
|
test, err := lib.LabTestGet(e.SearchKey)
|
||||||
|
if err == nil && test != nil {
|
||||||
|
entry["test_name"] = test.Name
|
||||||
|
|
||||||
|
// Load dossier sex/DOB once
|
||||||
|
if !dossierLoaded {
|
||||||
|
dossierLoaded = true
|
||||||
|
if d, err := lib.DossierGet("", e.DossierID); err == nil && d != nil {
|
||||||
|
switch d.Sex {
|
||||||
|
case 1:
|
||||||
|
dossierSex = "M"
|
||||||
|
case 2:
|
||||||
|
dossierSex = "F"
|
||||||
|
}
|
||||||
|
dossierDOB = d.DOB.Unix()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look up reference range for this test at the patient's age at time of lab
|
||||||
|
ts := e.Timestamp
|
||||||
|
if ts == 0 {
|
||||||
|
ts = int64(e.Ordinal) // fallback
|
||||||
|
}
|
||||||
|
if dossierDOB > 0 && ts > 0 {
|
||||||
|
ageDays := lib.AgeDays(dossierDOB, ts)
|
||||||
|
if ref, err := lib.LabRefLookup(e.SearchKey, dossierSex, ageDays); err == nil && ref != nil {
|
||||||
|
siFactor := float64(test.SIFactor) / lib.LabScale
|
||||||
|
if siFactor > 0 {
|
||||||
|
low := lib.FromLabScale(ref.RefLow) / siFactor
|
||||||
|
high := lib.FromLabScale(ref.RefHigh) / siFactor
|
||||||
|
entry["ref_low"] = low
|
||||||
|
entry["ref_high"] = high
|
||||||
|
|
||||||
|
// Compute flag from numeric value
|
||||||
|
if numVal, ok := entry["value"].(string); ok {
|
||||||
|
if v, err := strconv.ParseFloat(numVal, 64); err == nil {
|
||||||
|
if ref.RefLow >= 0 && v < low {
|
||||||
|
entry["flag"] = "L"
|
||||||
|
} else if ref.RefHigh >= 0 && v > high {
|
||||||
|
entry["flag"] = "H"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
switch e.Type {
|
switch e.Type {
|
||||||
case "root":
|
case "root":
|
||||||
entry["hint"] = "Use list_entries with parent=" + e.EntryID + " to list studies"
|
entry["hint"] = "Use list_entries with parent=" + e.EntryID + " to list studies"
|
||||||
|
|
@ -170,40 +192,23 @@ func formatEntries(entries []*lib.Entry) string {
|
||||||
return string(pretty)
|
return string(pretty)
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Image tools: RBAC via lib, then API for rendering ---
|
// --- Image tools: direct through lib ---
|
||||||
|
|
||||||
func mcpFetchImage(accessToken, dossier, slice string, wc, ww float64) (map[string]interface{}, error) {
|
func mcpFetchImage(accessorID, dossier, slice string, wc, ww float64) (map[string]interface{}, error) {
|
||||||
params := map[string]string{}
|
opts := &lib.ImageOpts{WC: wc, WW: ww}
|
||||||
if wc != 0 {
|
body, err := lib.RenderImage(accessorID, slice, opts, 2000)
|
||||||
params["wc"] = strconv.FormatFloat(wc, 'f', 0, 64)
|
|
||||||
}
|
|
||||||
if ww != 0 {
|
|
||||||
params["ww"] = strconv.FormatFloat(ww, 'f', 0, 64)
|
|
||||||
}
|
|
||||||
|
|
||||||
body, err := mcpAPIGet(accessToken, "/image/"+slice, params)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
b64 := base64.StdEncoding.EncodeToString(body)
|
b64 := base64.StdEncoding.EncodeToString(body)
|
||||||
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Slice %s (%d bytes)", slice[:8], len(body))), nil
|
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Slice %s (%d bytes)", slice[:8], len(body))), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func mcpFetchContactSheet(accessToken, dossier, series string, wc, ww float64) (map[string]interface{}, error) {
|
func mcpFetchContactSheet(accessorID, dossier, series string, wc, ww float64) (map[string]interface{}, error) {
|
||||||
params := map[string]string{}
|
body, err := lib.RenderContactSheet(accessorID, series, wc, ww)
|
||||||
if wc != 0 {
|
|
||||||
params["wc"] = strconv.FormatFloat(wc, 'f', 0, 64)
|
|
||||||
}
|
|
||||||
if ww != 0 {
|
|
||||||
params["ww"] = strconv.FormatFloat(ww, 'f', 0, 64)
|
|
||||||
}
|
|
||||||
|
|
||||||
body, err := mcpAPIGet(accessToken, "/contact-sheet.webp/"+series, params)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
b64 := base64.StdEncoding.EncodeToString(body)
|
b64 := base64.StdEncoding.EncodeToString(body)
|
||||||
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Contact sheet %s (%d bytes)", series[:8], len(body))), nil
|
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Contact sheet %s (%d bytes)", series[:8], len(body))), nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,33 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"inou/lib"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CreateChatGPTClient creates the OAuth client for ChatGPT Custom GPT Actions.
|
||||||
|
// Call this once during setup — see docs/chatgpt-actions-setup.md.
|
||||||
|
func CreateChatGPTClient() error {
|
||||||
|
_, err := lib.OAuthClientGet("chatgpt")
|
||||||
|
if err == nil {
|
||||||
|
return nil // Already exists
|
||||||
|
}
|
||||||
|
|
||||||
|
redirectURIs := []string{
|
||||||
|
"https://chat.openai.com/aip/g-*/oauth/callback",
|
||||||
|
"https://chatgpt.com/aip/g-*/oauth/callback",
|
||||||
|
}
|
||||||
|
|
||||||
|
client, secret, err := lib.OAuthClientCreate("ChatGPT Actions", redirectURIs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Created ChatGPT OAuth client:\n")
|
||||||
|
fmt.Printf(" Client ID: %s\n", client.ClientID)
|
||||||
|
fmt.Printf(" Client Secret: %s\n", secret)
|
||||||
|
fmt.Printf(" (Save the secret - it cannot be retrieved later)\n")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,89 @@
|
||||||
|
/* Claditor logo treatments — all use Sora from inou's font stack */
|
||||||
|
|
||||||
|
:root {
|
||||||
|
--claditor-amber: #B45309;
|
||||||
|
--claditor-dark: #1A1A1A;
|
||||||
|
--claditor-light: #F8F7F6;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 1. Default — confident, tight, bold */
|
||||||
|
.claditor-logo {
|
||||||
|
font-family: 'Sora', system-ui, sans-serif;
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.03em;
|
||||||
|
color: var(--claditor-amber);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 2. Editorial — airy, spaced, light */
|
||||||
|
.claditor-logo.editorial {
|
||||||
|
font-weight: 300;
|
||||||
|
letter-spacing: 0.08em;
|
||||||
|
text-transform: lowercase;
|
||||||
|
color: var(--claditor-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 3. Heavy impact — extra bold, letter-spaced */
|
||||||
|
.claditor-logo.heavy {
|
||||||
|
font-weight: 800;
|
||||||
|
letter-spacing: 0.02em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: var(--claditor-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 4. Inverted — white on dark */
|
||||||
|
.claditor-logo.inverted {
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
color: white;
|
||||||
|
text-shadow: 0 1px 3px rgba(0,0,0,0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 5. Gradient sweep */
|
||||||
|
.claditor-logo.gradient {
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
background: linear-gradient(90deg, var(--claditor-amber), #FF8D46);
|
||||||
|
-webkit-background-clip: text;
|
||||||
|
-webkit-text-fill-color: transparent;
|
||||||
|
background-clip: text;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 6. Weight-shifted — letters get heavier toward the end */
|
||||||
|
.claditor-logo.shifted {
|
||||||
|
font-weight: 400;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
}
|
||||||
|
.claditor-logo.shifted span:last-child { font-weight: 800; }
|
||||||
|
|
||||||
|
/* 7. Small-caps with bold first letter */
|
||||||
|
.claditor-logo.smallcaps {
|
||||||
|
font-variant: small-caps;
|
||||||
|
font-weight: 600;
|
||||||
|
letter-spacing: 0.05em;
|
||||||
|
text-transform: lowercase;
|
||||||
|
}
|
||||||
|
.claditor-logo.smallcaps::first-letter {
|
||||||
|
font-weight: 800;
|
||||||
|
font-variant: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 8. Minimal — light, tight, subtle */
|
||||||
|
.claditor-logo.minimal {
|
||||||
|
font-weight: 300;
|
||||||
|
letter-spacing: -0.04em;
|
||||||
|
opacity: 0.85;
|
||||||
|
color: var(--claditor-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* 9. Hover lift (for links/buttons) */
|
||||||
|
.claditor-logo.interactive {
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.03em;
|
||||||
|
color: var(--claditor-amber);
|
||||||
|
transition: transform 0.15s ease, color 0.15s ease;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
.claditor-logo.interactive:hover {
|
||||||
|
transform: translateY(-1px);
|
||||||
|
color: #9A4507;
|
||||||
|
}
|
||||||
|
|
@ -432,6 +432,17 @@ async function init() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// HTML entity encoder to prevent XSS
|
||||||
|
function escapeHtml(text) {
|
||||||
|
if (!text) return '';
|
||||||
|
return text
|
||||||
|
.replace(/&/g, '&')
|
||||||
|
.replace(/</g, '<')
|
||||||
|
.replace(/>/g, '>')
|
||||||
|
.replace(/"/g, '"')
|
||||||
|
.replace(/'/g, ''');
|
||||||
|
}
|
||||||
|
|
||||||
async function addPanelEmpty() {
|
async function addPanelEmpty() {
|
||||||
const idx = panelCount++;
|
const idx = panelCount++;
|
||||||
const panel = { idx, seriesId: null, slices: [], currentSlice: 0 };
|
const panel = { idx, seriesId: null, slices: [], currentSlice: 0 };
|
||||||
|
|
@ -444,11 +455,11 @@ async function addPanelEmpty() {
|
||||||
// Series header: show dropdown only if multiple series
|
// Series header: show dropdown only if multiple series
|
||||||
let headerContent;
|
let headerContent;
|
||||||
if (seriesList.length === 1) {
|
if (seriesList.length === 1) {
|
||||||
headerContent = '<span class="series-name">' + seriesList[0].series_desc + ' (' + seriesList[0].slice_count + ')</span>';
|
headerContent = '<span class="series-name">' + escapeHtml(seriesList[0].series_desc) + ' (' + seriesList[0].slice_count + ')</span>';
|
||||||
} else {
|
} else {
|
||||||
headerContent = '<select onchange="loadSeries(' + idx + ', this.value)">' +
|
headerContent = '<select onchange="loadSeries(' + idx + ', this.value)">' +
|
||||||
'<option value="">Select series...</option>' +
|
'<option value="">Select series...</option>' +
|
||||||
seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||||
'</select>';
|
'</select>';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -645,7 +656,7 @@ async function add3DPanel(idx, orientation, seriesOptions) {
|
||||||
'<span style="color:#B45309;margin-right:10px;font-weight:bold">' + orientation + '</span>' +
|
'<span style="color:#B45309;margin-right:10px;font-weight:bold">' + orientation + '</span>' +
|
||||||
'<select onchange="loadSeries(' + idx + ', this.value)">' +
|
'<select onchange="loadSeries(' + idx + ', this.value)">' +
|
||||||
'<option value="">Select ' + orientation + ' series...</option>' +
|
'<option value="">Select ' + orientation + ' series...</option>' +
|
||||||
(seriesOptions || []).map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
(seriesOptions || []).map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||||
'</select></div>' +
|
'</select></div>' +
|
||||||
'<div class="panel-content"><div class="img-wrapper">' +
|
'<div class="panel-content"><div class="img-wrapper">' +
|
||||||
'<img>' +
|
'<img>' +
|
||||||
|
|
@ -758,7 +769,7 @@ async function changeStudyForPanel(panelIdx, studyId) {
|
||||||
const seriesSelect = div.querySelector('.series-select');
|
const seriesSelect = div.querySelector('.series-select');
|
||||||
if (seriesSelect) {
|
if (seriesSelect) {
|
||||||
seriesSelect.innerHTML = '<option value="">Select series...</option>' +
|
seriesSelect.innerHTML = '<option value="">Select series...</option>' +
|
||||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('');
|
panel.seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Auto-select best series
|
// Auto-select best series
|
||||||
|
|
@ -917,7 +928,7 @@ async function addPanel() {
|
||||||
'</select>' +
|
'</select>' +
|
||||||
'<select class="series-select" onchange="loadSeries(' + idx + ', this.value)">' +
|
'<select class="series-select" onchange="loadSeries(' + idx + ', this.value)">' +
|
||||||
'<option value="">Select series...</option>' +
|
'<option value="">Select series...</option>' +
|
||||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
panel.seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||||
'</select>';
|
'</select>';
|
||||||
|
|
||||||
div.innerHTML =
|
div.innerHTML =
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,499 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"encoding/csv"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
_ "github.com/mattn/go-sqlite3"
|
||||||
|
"inou/lib"
|
||||||
|
)
|
||||||
|
|
||||||
|
const refDBPath = "/tank/inou/data/reference.db"
|
||||||
|
|
||||||
|
var refDB *sql.DB
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
if len(os.Args) < 2 {
|
||||||
|
fmt.Fprintln(os.Stderr, `Usage:
|
||||||
|
loinc-lookup import <loinc_lab.csv> Import LOINC lab table into reference.db
|
||||||
|
loinc-lookup <name> [specimen] [unit] Look up LOINC code for a lab test
|
||||||
|
loinc-lookup batch <file.jsonl> Batch lookup from JSONL (one {"name","specimen","unit"} per line)
|
||||||
|
loinc-lookup stats Show cache statistics`)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
lib.ConfigInit()
|
||||||
|
if err := lib.RefDBInit(refDBPath); err != nil {
|
||||||
|
log.Fatalf("RefDBInit: %v", err)
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
refDB, err = sql.Open("sqlite3", refDBPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("open reference.db: %v", err)
|
||||||
|
}
|
||||||
|
defer refDB.Close()
|
||||||
|
|
||||||
|
switch os.Args[1] {
|
||||||
|
case "import":
|
||||||
|
if len(os.Args) < 3 {
|
||||||
|
log.Fatal("Usage: loinc-lookup import <loinc_lab.csv>")
|
||||||
|
}
|
||||||
|
cmdImport(os.Args[2])
|
||||||
|
case "stats":
|
||||||
|
cmdStats()
|
||||||
|
case "batch":
|
||||||
|
if len(os.Args) < 3 {
|
||||||
|
log.Fatal("Usage: loinc-lookup batch <file.jsonl>")
|
||||||
|
}
|
||||||
|
cmdBatch(os.Args[2])
|
||||||
|
default:
|
||||||
|
name := os.Args[1]
|
||||||
|
specimen := ""
|
||||||
|
unit := ""
|
||||||
|
if len(os.Args) > 2 {
|
||||||
|
specimen = os.Args[2]
|
||||||
|
}
|
||||||
|
if len(os.Args) > 3 {
|
||||||
|
unit = os.Args[3]
|
||||||
|
}
|
||||||
|
cmdLookup(name, specimen, unit)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- import command ---
|
||||||
|
|
||||||
|
func cmdImport(csvPath string) {
|
||||||
|
// Create tables
|
||||||
|
for _, stmt := range []string{
|
||||||
|
`CREATE TABLE IF NOT EXISTS loinc_lab (
|
||||||
|
loinc_num TEXT PRIMARY KEY,
|
||||||
|
component TEXT NOT NULL,
|
||||||
|
property TEXT NOT NULL,
|
||||||
|
system TEXT NOT NULL,
|
||||||
|
scale TEXT NOT NULL,
|
||||||
|
method TEXT NOT NULL,
|
||||||
|
class TEXT NOT NULL,
|
||||||
|
long_name TEXT NOT NULL,
|
||||||
|
short_name TEXT NOT NULL
|
||||||
|
)`,
|
||||||
|
`CREATE TABLE IF NOT EXISTS loinc_cache (
|
||||||
|
cache_key TEXT PRIMARY KEY,
|
||||||
|
input_name TEXT NOT NULL,
|
||||||
|
input_specimen TEXT NOT NULL,
|
||||||
|
input_unit TEXT NOT NULL,
|
||||||
|
loinc_code TEXT NOT NULL,
|
||||||
|
loinc_name TEXT NOT NULL,
|
||||||
|
confidence TEXT NOT NULL DEFAULT 'llm'
|
||||||
|
)`,
|
||||||
|
} {
|
||||||
|
if _, err := refDB.Exec(stmt); err != nil {
|
||||||
|
log.Fatalf("create table: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Open(csvPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("open %s: %v", csvPath, err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
reader := csv.NewReader(f)
|
||||||
|
header, err := reader.Read()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("read header: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map column names to indices
|
||||||
|
colIdx := map[string]int{}
|
||||||
|
for i, h := range header {
|
||||||
|
colIdx[h] = i
|
||||||
|
}
|
||||||
|
need := []string{"LOINC_NUM", "COMPONENT", "PROPERTY", "SYSTEM", "SCALE_TYP", "METHOD_TYP", "CLASS", "LONG_COMMON_NAME", "SHORTNAME"}
|
||||||
|
for _, n := range need {
|
||||||
|
if _, ok := colIdx[n]; !ok {
|
||||||
|
log.Fatalf("missing column: %s", n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear and re-import
|
||||||
|
refDB.Exec("DELETE FROM loinc_lab")
|
||||||
|
|
||||||
|
tx, err := refDB.Begin()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("begin tx: %v", err)
|
||||||
|
}
|
||||||
|
stmt, err := tx.Prepare(`INSERT INTO loinc_lab (loinc_num, component, property, system, scale, method, class, long_name, short_name)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("prepare: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for {
|
||||||
|
row, err := reader.Read()
|
||||||
|
if err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
stmt.Exec(
|
||||||
|
row[colIdx["LOINC_NUM"]],
|
||||||
|
row[colIdx["COMPONENT"]],
|
||||||
|
row[colIdx["PROPERTY"]],
|
||||||
|
row[colIdx["SYSTEM"]],
|
||||||
|
row[colIdx["SCALE_TYP"]],
|
||||||
|
row[colIdx["METHOD_TYP"]],
|
||||||
|
row[colIdx["CLASS"]],
|
||||||
|
row[colIdx["LONG_COMMON_NAME"]],
|
||||||
|
row[colIdx["SHORTNAME"]],
|
||||||
|
)
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
stmt.Close()
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
log.Fatalf("commit: %v", err)
|
||||||
|
}
|
||||||
|
log.Printf("Imported %d LOINC lab codes", count)
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- lookup command ---
|
||||||
|
|
||||||
|
func cmdLookup(name, specimen, unit string) {
|
||||||
|
result, err := loincLookup(name, specimen, unit)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
out, _ := json.MarshalIndent(result, "", " ")
|
||||||
|
fmt.Println(string(out))
|
||||||
|
}
|
||||||
|
|
||||||
|
type LookupResult struct {
|
||||||
|
LoincCode string `json:"loinc_code"`
|
||||||
|
LoincName string `json:"loinc_name"`
|
||||||
|
Source string `json:"source"` // "cache" or "llm"
|
||||||
|
Candidates int `json:"candidates"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func loincLookup(name, specimen, unit string) (*LookupResult, error) {
|
||||||
|
// 1. Check cache
|
||||||
|
cacheKey := strings.ToLower(name + "|" + specimen + "|" + unit)
|
||||||
|
var cached []struct {
|
||||||
|
LoincCode string `db:"loinc_code"`
|
||||||
|
LoincName string `db:"loinc_name"`
|
||||||
|
}
|
||||||
|
lib.RefQuery("SELECT loinc_code, loinc_name FROM loinc_cache WHERE cache_key = ?", []any{cacheKey}, &cached)
|
||||||
|
if len(cached) > 0 {
|
||||||
|
return &LookupResult{
|
||||||
|
LoincCode: cached[0].LoincCode,
|
||||||
|
LoincName: cached[0].LoincName,
|
||||||
|
Source: "cache",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Expand input to LOINC terminology via LLM, then search
|
||||||
|
// Replace "%" with "percentage" so LLM connects to LOINC's "/100 leukocytes" naming
|
||||||
|
lookupUnit := unit
|
||||||
|
if lookupUnit == "%" {
|
||||||
|
lookupUnit = "percentage"
|
||||||
|
}
|
||||||
|
tokens := tokenize(name + " " + specimen + " " + lookupUnit)
|
||||||
|
if expanded, err := llmExpand(name, specimen, lookupUnit); err == nil {
|
||||||
|
tokens = expanded
|
||||||
|
}
|
||||||
|
candidates, _ := searchCandidates(tokens)
|
||||||
|
|
||||||
|
// If unit is %, drop candidates that are counts (#/volume, NCnc)
|
||||||
|
if unit == "%" {
|
||||||
|
var filtered []candidate
|
||||||
|
for _, c := range candidates {
|
||||||
|
if c.Property == "NCnc" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
filtered = append(filtered, c)
|
||||||
|
}
|
||||||
|
if len(filtered) > 0 {
|
||||||
|
candidates = filtered
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(candidates) == 0 {
|
||||||
|
return nil, fmt.Errorf("no LOINC candidates found for %q", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. LLM pick from candidates
|
||||||
|
code, lname, err := llmPick(name, specimen, lookupUnit, candidates)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Cache
|
||||||
|
refDB.Exec(`INSERT OR REPLACE INTO loinc_cache (cache_key, input_name, input_specimen, input_unit, loinc_code, loinc_name, confidence)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, 'llm')`, cacheKey, name, specimen, unit, code, lname)
|
||||||
|
|
||||||
|
return &LookupResult{
|
||||||
|
LoincCode: code,
|
||||||
|
LoincName: lname,
|
||||||
|
Source: "llm",
|
||||||
|
Candidates: len(candidates),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func tokenize(s string) []string {
|
||||||
|
s = strings.ToLower(s)
|
||||||
|
// Replace common separators with spaces
|
||||||
|
for _, c := range []string{",", ";", "(", ")", "[", "]", "/", "-", ".", ":"} {
|
||||||
|
s = strings.ReplaceAll(s, c, " ")
|
||||||
|
}
|
||||||
|
var tokens []string
|
||||||
|
seen := map[string]bool{}
|
||||||
|
for _, t := range strings.Fields(s) {
|
||||||
|
if len(t) < 2 || seen[t] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
tokens = append(tokens, t)
|
||||||
|
seen[t] = true
|
||||||
|
}
|
||||||
|
return tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
type candidate struct {
|
||||||
|
LoincNum string `db:"loinc_num"`
|
||||||
|
LongName string `db:"long_name"`
|
||||||
|
ShortName string `db:"short_name"`
|
||||||
|
System string `db:"system"`
|
||||||
|
Component string `db:"component"`
|
||||||
|
Property string `db:"property"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func searchCandidates(tokens []string) ([]candidate, int) {
|
||||||
|
if len(tokens) == 0 {
|
||||||
|
return nil, 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query per token, collect into a map keyed by loinc_num
|
||||||
|
type entry struct {
|
||||||
|
c candidate
|
||||||
|
hits int // number of distinct tokens that matched
|
||||||
|
bonus int // extra score for quality of match
|
||||||
|
}
|
||||||
|
entries := map[string]*entry{}
|
||||||
|
|
||||||
|
for _, t := range tokens {
|
||||||
|
pattern := "%" + t + "%"
|
||||||
|
query := "SELECT loinc_num, long_name, short_name, system, component, property FROM loinc_lab WHERE " +
|
||||||
|
"LOWER(long_name) LIKE ? OR LOWER(short_name) LIKE ? OR LOWER(component) LIKE ?"
|
||||||
|
var results []candidate
|
||||||
|
lib.RefQuery(query, []any{pattern, pattern, pattern}, &results)
|
||||||
|
for _, c := range results {
|
||||||
|
if e, ok := entries[c.LoincNum]; ok {
|
||||||
|
e.hits++
|
||||||
|
} else {
|
||||||
|
entries[c.LoincNum] = &entry{c: c, hits: 1}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Require at least 2 token matches (or 1 if only 1 token)
|
||||||
|
minHits := 2
|
||||||
|
if len(tokens) <= 1 {
|
||||||
|
minHits = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Score: hits × 100 + bonus for component exactness (shorter component = more specific)
|
||||||
|
type scored struct {
|
||||||
|
c candidate
|
||||||
|
score int
|
||||||
|
}
|
||||||
|
var scoredResults []scored
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.hits < minHits {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
s := e.hits * 100
|
||||||
|
// Bonus: prefer entries where component is a simple term, not a compound like "Carboxyhemoglobin/Hemoglobin.total"
|
||||||
|
compLen := len(e.c.Component)
|
||||||
|
if compLen > 0 && compLen < 50 {
|
||||||
|
s += 50 - compLen // shorter component = higher bonus
|
||||||
|
}
|
||||||
|
// Bonus: prefer entries without "/" in component (simple analytes)
|
||||||
|
if !strings.Contains(e.c.Component, "/") {
|
||||||
|
s += 20
|
||||||
|
}
|
||||||
|
scoredResults = append(scoredResults, scored{e.c, s})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by score descending, take top 30
|
||||||
|
for i := range scoredResults {
|
||||||
|
for j := i + 1; j < len(scoredResults); j++ {
|
||||||
|
if scoredResults[j].score > scoredResults[i].score {
|
||||||
|
scoredResults[i], scoredResults[j] = scoredResults[j], scoredResults[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var top []candidate
|
||||||
|
maxHits := 0
|
||||||
|
for i, s := range scoredResults {
|
||||||
|
if i >= 30 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
top = append(top, s.c)
|
||||||
|
hits := s.score / 100 // extract hit count from score
|
||||||
|
if hits > maxHits {
|
||||||
|
maxHits = hits
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return top, maxHits
|
||||||
|
}
|
||||||
|
|
||||||
|
func llmExpand(name, specimen, unit string) ([]string, error) {
|
||||||
|
prompt := fmt.Sprintf(`Given a lab test, return search terms to find it in the LOINC database.
|
||||||
|
LOINC uses formal medical terminology (e.g. "Leukocytes" not "White Blood Cells", "Erythrocytes" not "Red Blood Cells", "Oxygen" not "O2" or "pO2").
|
||||||
|
|
||||||
|
Lab test:
|
||||||
|
Name: %s
|
||||||
|
Specimen: %s
|
||||||
|
Unit: %s
|
||||||
|
|
||||||
|
Return a JSON object: {"terms": ["term1", "term2", ...]}
|
||||||
|
Include: the LOINC component name, specimen system code (e.g. Bld, BldA, BldC, BldV, Ser/Plas, Urine), and any synonyms that might appear in LOINC long names.
|
||||||
|
Keep it to 3-6 terms. JSON only.`, name, specimen, unit)
|
||||||
|
|
||||||
|
resp, err := lib.CallGemini(prompt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var result struct {
|
||||||
|
Terms []string `json:"terms"`
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||||
|
return nil, fmt.Errorf("parse expand response %q: %w", resp, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lowercase all terms and add original input tokens as fallback
|
||||||
|
var terms []string
|
||||||
|
seen := map[string]bool{}
|
||||||
|
for _, t := range result.Terms {
|
||||||
|
t = strings.ToLower(strings.TrimSpace(t))
|
||||||
|
if t != "" && !seen[t] {
|
||||||
|
terms = append(terms, t)
|
||||||
|
seen[t] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Also include original tokens so we never lose the raw input
|
||||||
|
for _, t := range tokenize(name + " " + specimen) {
|
||||||
|
if !seen[t] {
|
||||||
|
terms = append(terms, t)
|
||||||
|
seen[t] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return terms, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func llmPick(name, specimen, unit string, candidates []candidate) (string, string, error) {
|
||||||
|
// Format candidates as a numbered list
|
||||||
|
// Replace "/100" and "fraction" with "percentage" so LLM connects them to "%" unit
|
||||||
|
var lines []string
|
||||||
|
for i, c := range candidates {
|
||||||
|
display := c.LongName
|
||||||
|
display = strings.ReplaceAll(display, "/100 ", "percentage of ")
|
||||||
|
display = strings.ReplaceAll(display, "fraction", "percentage")
|
||||||
|
lines = append(lines, fmt.Sprintf("%d. %s — %s [System: %s]", i+1, c.LoincNum, display, c.System))
|
||||||
|
}
|
||||||
|
|
||||||
|
prompt := fmt.Sprintf(`You are a clinical laboratory informatics system. Given a lab test, pick the BEST matching LOINC code from the candidate list.
|
||||||
|
|
||||||
|
Lab test:
|
||||||
|
Name: %s
|
||||||
|
Specimen: %s
|
||||||
|
Unit: %s
|
||||||
|
|
||||||
|
Candidates:
|
||||||
|
%s
|
||||||
|
|
||||||
|
Return ONLY a JSON object: {"pick": <number>, "loinc": "<code>", "name": "<long name>"}
|
||||||
|
Pick the candidate that best matches the test name, specimen type, and unit. If none match well, pick the closest.
|
||||||
|
JSON only, no explanation.`, name, specimen, unit, strings.Join(lines, "\n"))
|
||||||
|
|
||||||
|
resp, err := lib.CallGemini(prompt)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", fmt.Errorf("LLM call failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var result struct {
|
||||||
|
Pick int `json:"pick"`
|
||||||
|
Loinc string `json:"loinc"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||||
|
return "", "", fmt.Errorf("parse LLM response %q: %w", resp, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve by pick number if loinc field is empty
|
||||||
|
if result.Loinc == "" && result.Pick > 0 && result.Pick <= len(candidates) {
|
||||||
|
result.Loinc = candidates[result.Pick-1].LoincNum
|
||||||
|
result.Name = candidates[result.Pick-1].LongName
|
||||||
|
}
|
||||||
|
// Verify the code is actually in our candidate list
|
||||||
|
for _, c := range candidates {
|
||||||
|
if c.LoincNum == result.Loinc {
|
||||||
|
return result.Loinc, c.LongName, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Pick number as fallback
|
||||||
|
if result.Pick > 0 && result.Pick <= len(candidates) {
|
||||||
|
c := candidates[result.Pick-1]
|
||||||
|
return c.LoincNum, c.LongName, nil
|
||||||
|
}
|
||||||
|
return "", "", fmt.Errorf("LLM returned %q (pick %d) — not in %d candidates", result.Loinc, result.Pick, len(candidates))
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- batch command ---
|
||||||
|
|
||||||
|
func cmdBatch(path string) {
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("read %s: %v", path, err)
|
||||||
|
}
|
||||||
|
for _, line := range strings.Split(string(data), "\n") {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if line == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var input struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Specimen string `json:"specimen"`
|
||||||
|
Unit string `json:"unit"`
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal([]byte(line), &input); err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "skip bad line: %s\n", line)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result, err := loincLookup(input.Name, input.Specimen, input.Unit)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s: %v\n", input.Name, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Printf("%-40s → %s %s [%s]\n", input.Name, result.LoincCode, result.LoincName, result.Source)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- stats command ---
|
||||||
|
|
||||||
|
func cmdStats() {
|
||||||
|
var total []struct{ N int `db:"n"` }
|
||||||
|
lib.RefQuery("SELECT COUNT(*) as n FROM loinc_lab", nil, &total)
|
||||||
|
if len(total) > 0 {
|
||||||
|
fmt.Printf("LOINC lab codes: %d\n", total[0].N)
|
||||||
|
}
|
||||||
|
|
||||||
|
var cached []struct{ N int `db:"n"` }
|
||||||
|
lib.RefQuery("SELECT COUNT(*) as n FROM loinc_cache", nil, &cached)
|
||||||
|
if len(cached) > 0 {
|
||||||
|
fmt.Printf("Cached lookups: %d\n", cached[0].N)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,720 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>inou — Genetics</title>
|
||||||
|
<style>
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Regular.ttf'); font-weight: 400; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Light.ttf'); font-weight: 300; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-SemiBold.ttf'); font-weight: 600; }
|
||||||
|
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Bold.ttf'); font-weight: 700; } /* NOTE(review): only Sora-Regular and Sora-SemiBold ship with this commit — confirm Sora-Bold.ttf exists at this path */
|
||||||
|
|
||||||
|
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||||
|
|
||||||
|
:root {
|
||||||
|
--amber: #B45309;
|
||||||
|
--amber-light: #FEF3C7;
|
||||||
|
--amber-mid: #F59E0B;
|
||||||
|
--bg: #F8F7F6;
|
||||||
|
--surface: #FFFFFF;
|
||||||
|
--border: #E5E3E0;
|
||||||
|
--text: #1A1A1A;
|
||||||
|
--text-muted: #6B6968;
|
||||||
|
--text-faint: #A8A5A2;
|
||||||
|
--green: #15803D;
|
||||||
|
--green-light: #DCFCE7;
|
||||||
|
--nav-bg: #1C1917;
|
||||||
|
--nav-text: #D6D3D1;
|
||||||
|
--nav-active: #FFFFFF;
|
||||||
|
--sidebar-w: 220px;
|
||||||
|
--topbar-h: 52px;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Sora', system-ui, sans-serif;
|
||||||
|
background: var(--bg);
|
||||||
|
color: var(--text);
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 1.5;
|
||||||
|
height: 100vh;
|
||||||
|
overflow: hidden;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* TOP NAV */
|
||||||
|
.topbar {
|
||||||
|
height: var(--topbar-h);
|
||||||
|
background: var(--nav-bg);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 0 20px;
|
||||||
|
gap: 16px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
border-bottom: 1px solid #2C2A28;
|
||||||
|
}
|
||||||
|
.topbar-logo {
|
||||||
|
font-weight: 700;
|
||||||
|
font-size: 16px;
|
||||||
|
color: #FFFFFF;
|
||||||
|
letter-spacing: -0.3px;
|
||||||
|
}
|
||||||
|
.topbar-logo span { color: var(--amber); }
|
||||||
|
.topbar-patient {
|
||||||
|
margin-left: auto;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
.topbar-patient-name {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--nav-text);
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
.topbar-patient-dob {
|
||||||
|
font-size: 12px;
|
||||||
|
color: #78716C;
|
||||||
|
}
|
||||||
|
.avatar {
|
||||||
|
width: 30px; height: 30px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: var(--amber);
|
||||||
|
display: flex; align-items: center; justify-content: center;
|
||||||
|
font-size: 12px; font-weight: 700; color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* LAYOUT */
|
||||||
|
.layout {
|
||||||
|
display: flex;
|
||||||
|
flex: 1;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* SIDEBAR */
|
||||||
|
.sidebar {
|
||||||
|
width: var(--sidebar-w);
|
||||||
|
background: var(--nav-bg);
|
||||||
|
flex-shrink: 0;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 12px 0;
|
||||||
|
border-right: 1px solid #2C2A28;
|
||||||
|
}
|
||||||
|
.nav-section-label {
|
||||||
|
font-size: 10px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: #57534E;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.8px;
|
||||||
|
padding: 12px 16px 4px;
|
||||||
|
}
|
||||||
|
.nav-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
padding: 7px 16px;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--nav-text);
|
||||||
|
cursor: pointer;
|
||||||
|
border-radius: 0;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
.nav-item:hover { background: #292524; }
|
||||||
|
.nav-item.active {
|
||||||
|
color: var(--nav-active);
|
||||||
|
background: #292524;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
.nav-item.active::before {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
width: 3px;
|
||||||
|
height: 28px;
|
||||||
|
background: var(--amber);
|
||||||
|
border-radius: 0 2px 2px 0;
|
||||||
|
}
|
||||||
|
.nav-item { position: relative; }
|
||||||
|
.nav-dot {
|
||||||
|
width: 6px; height: 6px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: #57534E;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.nav-dot.active { background: var(--amber); }
|
||||||
|
.nav-sub {
|
||||||
|
padding-left: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* MAIN */
|
||||||
|
.main {
|
||||||
|
flex: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 24px 32px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* BREADCRUMB + SEARCH ROW */
|
||||||
|
.top-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 16px;
|
||||||
|
}
|
||||||
|
.breadcrumb {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
}
|
||||||
|
.breadcrumb span { color: var(--text); font-weight: 600; }
|
||||||
|
.breadcrumb-sep { color: var(--text-faint); }
|
||||||
|
|
||||||
|
.search-box {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 8px;
|
||||||
|
padding: 7px 12px;
|
||||||
|
width: 260px;
|
||||||
|
}
|
||||||
|
.search-box input {
|
||||||
|
border: none;
|
||||||
|
outline: none;
|
||||||
|
font-family: 'Sora', sans-serif;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
background: transparent;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
.search-box input::placeholder { color: var(--text-faint); }
|
||||||
|
.search-icon { color: var(--text-faint); font-size: 15px; }
|
||||||
|
|
||||||
|
/* STATS ROW */
|
||||||
|
.stats-row {
|
||||||
|
display: flex;
|
||||||
|
gap: 24px;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
.stat {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 1px;
|
||||||
|
}
|
||||||
|
.stat-value {
|
||||||
|
font-size: 22px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
.stat-label {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
}
|
||||||
|
.stat-divider {
|
||||||
|
width: 1px;
|
||||||
|
height: 32px;
|
||||||
|
background: var(--border);
|
||||||
|
}
|
||||||
|
.hidden-note {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
padding: 4px 10px;
|
||||||
|
background: var(--bg);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 20px;
|
||||||
|
margin-left: auto;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.hidden-note:hover { border-color: var(--amber); color: var(--amber); }
|
||||||
|
|
||||||
|
/* TIER GRID */
|
||||||
|
.tier-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(4, 1fr);
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
.tier-tile {
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 10px;
|
||||||
|
padding: 14px 16px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: border-color 0.15s, box-shadow 0.15s;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 4px;
|
||||||
|
}
|
||||||
|
.tier-tile:hover {
|
||||||
|
border-color: #D1C5BA;
|
||||||
|
box-shadow: 0 2px 8px rgba(0,0,0,0.06);
|
||||||
|
}
|
||||||
|
.tier-tile.active {
|
||||||
|
border-color: var(--amber);
|
||||||
|
box-shadow: 0 0 0 1px var(--amber);
|
||||||
|
}
|
||||||
|
.tier-name {
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.tier-count {
|
||||||
|
font-size: 20px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
line-height: 1.1;
|
||||||
|
}
|
||||||
|
.tier-hidden {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
}
|
||||||
|
.tier-tile.large .tier-count { color: var(--amber); }
|
||||||
|
|
||||||
|
/* EXPANDED TIER */
|
||||||
|
.expanded-section {
|
||||||
|
background: var(--surface);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 12px;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
.expanded-header {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 14px 20px;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.expanded-title {
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.expanded-subtitle {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
font-weight: 400;
|
||||||
|
}
|
||||||
|
.toggle-hidden {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.toggle-pill {
|
||||||
|
width: 28px; height: 16px;
|
||||||
|
background: var(--border);
|
||||||
|
border-radius: 8px;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.toggle-pill::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
top: 2px; left: 2px;
|
||||||
|
width: 12px; height: 12px;
|
||||||
|
border-radius: 50%;
|
||||||
|
background: white;
|
||||||
|
box-shadow: 0 1px 2px rgba(0,0,0,0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* VARIANTS TABLE */
|
||||||
|
.variants-table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
.variants-table th {
|
||||||
|
text-align: left;
|
||||||
|
font-size: 11px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-faint);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
padding: 10px 20px;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.variants-table td {
|
||||||
|
padding: 11px 20px;
|
||||||
|
border-bottom: 1px solid #F0EDEA;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
.variants-table tr:last-child td { border-bottom: none; }
|
||||||
|
.variants-table tr:hover td { background: #FAFAF9; }
|
||||||
|
|
||||||
|
.gene-name {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
font-family: 'Sora', monospace; /* NOTE(review): Sora is proportional — the monospace fallback only applies if Sora fails to load; confirm whether a true monospace stack was intended for gene symbols */
|
||||||
|
}
|
||||||
|
.rsid {
|
||||||
|
font-size: 11px;
|
||||||
|
color: var(--text-faint);
|
||||||
|
margin-top: 1px;
|
||||||
|
}
|
||||||
|
.finding-text {
|
||||||
|
font-size: 13px;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.genotype {
|
||||||
|
font-family: 'Sora', monospace; /* NOTE(review): Sora is proportional — the monospace fallback only applies if Sora fails to load; confirm whether a true monospace stack was intended for genotype codes */
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text);
|
||||||
|
background: #F4F1EE;
|
||||||
|
padding: 2px 8px;
|
||||||
|
border-radius: 4px;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
.sig-dot {
|
||||||
|
width: 8px; height: 8px;
|
||||||
|
border-radius: 50%;
|
||||||
|
display: inline-block;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.sig-cell {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.sig-dot.moderate { background: var(--amber); }
|
||||||
|
.sig-dot.protective { background: var(--green); }
|
||||||
|
.sig-dot.low { background: var(--text-faint); }
|
||||||
|
.sig-dot.clear { background: #D4D0CB; }
|
||||||
|
|
||||||
|
.sig-label.moderate { color: var(--amber); }
|
||||||
|
.sig-label.protective { color: var(--green); }
|
||||||
|
.sig-label.low { color: var(--text-faint); }
|
||||||
|
.sig-label.clear { color: var(--text-faint); }
|
||||||
|
|
||||||
|
/* EXPANDED FOOTER */
|
||||||
|
.expanded-footer {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 12px 20px;
|
||||||
|
border-top: 1px solid var(--border);
|
||||||
|
background: #FAFAF9;
|
||||||
|
}
|
||||||
|
.footer-count {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
.load-more {
|
||||||
|
font-size: 12px;
|
||||||
|
color: var(--amber);
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
.load-more:hover { text-decoration: underline; }
|
||||||
|
|
||||||
|
/* AI CTA */
|
||||||
|
.ai-cta {
|
||||||
|
background: var(--nav-bg);
|
||||||
|
border-radius: 12px;
|
||||||
|
padding: 16px 20px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 16px;
|
||||||
|
}
|
||||||
|
.ai-cta-text {
|
||||||
|
flex: 1;
|
||||||
|
font-size: 13px;
|
||||||
|
color: #A8A5A2;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
.ai-cta-text strong { color: #FFFFFF; font-weight: 600; }
|
||||||
|
.ai-cta-btn {
|
||||||
|
background: var(--amber);
|
||||||
|
color: white;
|
||||||
|
border: none;
|
||||||
|
border-radius: 8px;
|
||||||
|
padding: 9px 18px;
|
||||||
|
font-family: 'Sora', sans-serif;
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.ai-cta-btn:hover { background: #9A4507; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<!-- TOP NAV -->
|
||||||
|
<div class="topbar">
|
||||||
|
<div class="topbar-logo">inou<span>.</span></div>
|
||||||
|
<div class="topbar-patient">
|
||||||
|
<div>
|
||||||
|
<div class="topbar-patient-name">Jane Doe</div>
|
||||||
|
<div class="topbar-patient-dob">DOB Jan 1 2017 · Female</div>
|
||||||
|
</div>
|
||||||
|
<div class="avatar">JD</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="layout">
|
||||||
|
|
||||||
|
<!-- SIDEBAR -->
|
||||||
|
<nav class="sidebar">
|
||||||
|
<div class="nav-section-label">Overview</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Dashboard</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Tests</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Labs</a>
|
||||||
|
<a class="nav-item active" href="#"><span class="nav-dot active"></span>Genetics</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Imaging</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Assessments</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Body</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Vitals</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Exercise</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Nutrition</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Sleep</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Treatment</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Medications</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Supplements</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Therapy</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">History</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Diagnoses</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Symptoms</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Family History</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Care Team</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Consultations</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Providers</a>
|
||||||
|
|
||||||
|
<div class="nav-section-label">Files</div>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Documents</a>
|
||||||
|
<a class="nav-item" href="#"><span class="nav-dot"></span>Uploads</a>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<!-- MAIN CONTENT -->
|
||||||
|
<main class="main">
|
||||||
|
|
||||||
|
<!-- BREADCRUMB + SEARCH -->
|
||||||
|
<div class="top-row">
|
||||||
|
<div class="breadcrumb">
|
||||||
|
Jane Doe <span class="breadcrumb-sep">›</span> Tests <span class="breadcrumb-sep">›</span> <span>Genetics</span>
|
||||||
|
</div>
|
||||||
|
<div class="search-box">
|
||||||
|
<span class="search-icon">⌕</span>
|
||||||
|
<input type="text" placeholder="Search gene or rsID…">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- STATS ROW -->
|
||||||
|
<div class="stats-row">
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">3,866</div>
|
||||||
|
<div class="stat-label">Total variants</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-divider"></div>
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">12</div>
|
||||||
|
<div class="stat-label">Categories</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-divider"></div>
|
||||||
|
<div class="stat">
|
||||||
|
<div class="stat-value">597</div>
|
||||||
|
<div class="stat-label">Hidden (no risk)</div>
|
||||||
|
</div>
|
||||||
|
<div class="hidden-note">Show hidden variants</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- TIER GRID -->
|
||||||
|
<div class="tier-grid">
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Traits</div>
|
||||||
|
<div class="tier-count">132</div>
|
||||||
|
<div class="tier-hidden">49 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Longevity</div>
|
||||||
|
<div class="tier-count">12</div>
|
||||||
|
<div class="tier-hidden">1 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile active">
|
||||||
|
<div class="tier-name">Metabolism</div>
|
||||||
|
<div class="tier-count">97</div>
|
||||||
|
<div class="tier-hidden">51 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Medications</div>
|
||||||
|
<div class="tier-count">101</div>
|
||||||
|
<div class="tier-hidden">26 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Mental Health</div>
|
||||||
|
<div class="tier-count">63</div>
|
||||||
|
<div class="tier-hidden">31 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Neurological</div>
|
||||||
|
<div class="tier-count">91</div>
|
||||||
|
<div class="tier-hidden">46 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Fertility</div>
|
||||||
|
<div class="tier-count">12</div>
|
||||||
|
<div class="tier-hidden">7 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Blood</div>
|
||||||
|
<div class="tier-count">100</div>
|
||||||
|
<div class="tier-hidden">12 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Cardiovascular</div>
|
||||||
|
<div class="tier-count">104</div>
|
||||||
|
<div class="tier-hidden">31 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile">
|
||||||
|
<div class="tier-name">Autoimmune</div>
|
||||||
|
<div class="tier-count">80</div>
|
||||||
|
<div class="tier-hidden">43 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile large">
|
||||||
|
<div class="tier-name">Disease</div>
|
||||||
|
<div class="tier-count">2,272</div>
|
||||||
|
<div class="tier-hidden">233 hidden</div>
|
||||||
|
</div>
|
||||||
|
<div class="tier-tile large">
|
||||||
|
<div class="tier-name">Cancer</div>
|
||||||
|
<div class="tier-count">998</div>
|
||||||
|
<div class="tier-hidden">67 hidden</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- EXPANDED: METABOLISM -->
|
||||||
|
<div class="expanded-section">
|
||||||
|
<div class="expanded-header">
|
||||||
|
<div>
|
||||||
|
<div class="expanded-title">Metabolism <span style="font-weight:400; color: var(--text-muted)">· 97 variants</span></div>
|
||||||
|
<div class="expanded-subtitle">Sorted by significance</div>
|
||||||
|
</div>
|
||||||
|
<div class="toggle-hidden">
|
||||||
|
<span>Show hidden</span>
|
||||||
|
<div class="toggle-pill"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<table class="variants-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th style="width:130px">Gene</th>
|
||||||
|
<th>Finding</th>
|
||||||
|
<th style="width:90px">Genotype</th>
|
||||||
|
<th style="width:120px">Significance</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">MTHFR</div><div class="rsid">rs1801133</div></td>
|
||||||
|
<td><div class="finding-text">10–20% folate processing efficiency</div></td>
|
||||||
|
<td><span class="genotype">AA</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP2C19</div><div class="rsid">rs4244285</div></td>
|
||||||
|
<td><div class="finding-text">Poorer metabolizer of several medicines</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">PPARG</div><div class="rsid">rs1801282</div></td>
|
||||||
|
<td><div class="finding-text">Higher cardiovascular risk with high fat diet</div></td>
|
||||||
|
<td><span class="genotype">CG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">TCF7L2</div><div class="rsid">rs7903146</div></td>
|
||||||
|
<td><div class="finding-text">Increased type 2 diabetes risk</div></td>
|
||||||
|
<td><span class="genotype">CT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">FTO</div><div class="rsid">rs9939609</div></td>
|
||||||
|
<td><div class="finding-text">1.67× increased obesity risk</div></td>
|
||||||
|
<td><span class="genotype">AT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">SLCO1B1</div><div class="rsid">rs4149056</div></td>
|
||||||
|
<td><div class="finding-text">Increased statin-induced myopathy risk</div></td>
|
||||||
|
<td><span class="genotype">CT</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">APOA2</div><div class="rsid">rs5082</div></td>
|
||||||
|
<td><div class="finding-text">Associated with higher HDL cholesterol</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot protective"></span><span class="sig-label protective">Protective</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP1A2</div><div class="rsid">rs762551</div></td>
|
||||||
|
<td><div class="finding-text">Slow caffeine metabolizer</div></td>
|
||||||
|
<td><span class="genotype">AC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">CYP3A5</div><div class="rsid">rs776746</div></td>
|
||||||
|
<td><div class="finding-text">Non-expressor — affects drug dosing</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">MCM6</div><div class="rsid">rs4988235</div></td>
|
||||||
|
<td><div class="finding-text">Partial lactase persistence</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">APOE</div><div class="rsid">rs7412</div></td>
|
||||||
|
<td><div class="finding-text">Normal lipid metabolism</div></td>
|
||||||
|
<td><span class="genotype">CC</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot clear"></span><span class="sig-label clear">Clear</span></div></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><div class="gene-name">GCK</div><div class="rsid">rs1799884</div></td>
|
||||||
|
<td><div class="finding-text">Slightly reduced glucose sensing</div></td>
|
||||||
|
<td><span class="genotype">AG</span></td>
|
||||||
|
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<div class="expanded-footer">
|
||||||
|
<div class="footer-count">Showing 12 of 97 variants</div>
|
||||||
|
<a class="load-more" href="#">Load more</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- AI CTA -->
|
||||||
|
<div class="ai-cta">
|
||||||
|
<div class="ai-cta-text">
|
||||||
|
<strong>Your AI has access to all 3,866 variants</strong>, including hidden ones. Ask it to reason across your metabolism, medication sensitivities, and disease risk together.
|
||||||
|
</div>
|
||||||
|
<button class="ai-cta-btn">Ask Claude about your genetics →</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
Loading…
Reference in New Issue