Compare commits
10 Commits
5ebf9925ed
...
c1a269f3ae
| Author | SHA1 | Date |
|---|---|---|
|
|
c1a269f3ae | |
|
|
a55271f863 | |
|
|
257a021669 | |
|
|
13e991aa1c | |
|
|
f58a4f804e | |
|
|
831ab61445 | |
|
|
f2e352ebcf | |
|
|
ade93669d3 | |
|
|
bf57e28e71 | |
|
|
989969375d |
|
|
@ -1,63 +1,13 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"encoding/json"
|
||||
"image"
|
||||
"image/color"
|
||||
"image/draw"
|
||||
"image/png"
|
||||
"log"
|
||||
"math"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/chai2010/webp"
|
||||
"golang.org/x/image/font"
|
||||
"golang.org/x/image/font/opentype"
|
||||
"golang.org/x/image/math/fixed"
|
||||
"inou/lib"
|
||||
|
||||
xdraw "golang.org/x/image/draw"
|
||||
)
|
||||
|
||||
//go:embed Sora-Regular.ttf
|
||||
var soraRegularData []byte
|
||||
|
||||
//go:embed Sora-SemiBold.ttf
|
||||
var soraSemiBoldData []byte
|
||||
|
||||
var (
|
||||
soraFace14 font.Face
|
||||
soraFace12 font.Face
|
||||
soraBoldFace14 font.Face
|
||||
soraBoldFace20 font.Face
|
||||
)
|
||||
|
||||
func init() {
|
||||
regular, err := opentype.Parse(soraRegularData)
|
||||
if err != nil {
|
||||
log.Printf("Failed to parse Sora Regular: %v", err)
|
||||
return
|
||||
}
|
||||
semibold, err := opentype.Parse(soraSemiBoldData)
|
||||
if err != nil {
|
||||
log.Printf("Failed to parse Sora SemiBold: %v", err)
|
||||
return
|
||||
}
|
||||
soraFace14, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 14, DPI: 72})
|
||||
soraFace12, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 12, DPI: 72})
|
||||
soraBoldFace14, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 14, DPI: 72})
|
||||
soraBoldFace20, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 20, DPI: 72})
|
||||
}
|
||||
|
||||
const thumbSize = 128
|
||||
const headerHeight = 58
|
||||
const cols = 12
|
||||
const padding = 2 // separation between contacts
|
||||
|
||||
func handleContactSheet(w http.ResponseWriter, r *http.Request) {
|
||||
seriesHex := strings.TrimPrefix(r.URL.Path, "/contact-sheet.webp/")
|
||||
if seriesHex == "" || len(seriesHex) != 16 {
|
||||
|
|
@ -65,359 +15,27 @@ func handleContactSheet(w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// Get access context
|
||||
ctx := getAccessContextOrFail(w, r)
|
||||
if ctx == nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Parse optional window/level overrides
|
||||
q := r.URL.Query()
|
||||
var wcOverride, wwOverride float64
|
||||
var hasWLOverride bool
|
||||
if wc := q.Get("wc"); wc != "" {
|
||||
wcOverride, _ = strconv.ParseFloat(wc, 64)
|
||||
hasWLOverride = true
|
||||
var wc, ww float64
|
||||
if v := q.Get("wc"); v != "" {
|
||||
wc, _ = strconv.ParseFloat(v, 64)
|
||||
}
|
||||
if ww := q.Get("ww"); ww != "" {
|
||||
wwOverride, _ = strconv.ParseFloat(ww, 64)
|
||||
hasWLOverride = true
|
||||
if v := q.Get("ww"); v != "" {
|
||||
ww, _ = strconv.ParseFloat(v, 64)
|
||||
}
|
||||
|
||||
seriesID := seriesHex
|
||||
|
||||
// Look up series entry (RBAC already checked by portal)
|
||||
series, err := lib.EntryGet(ctx, seriesID)
|
||||
if err != nil {
|
||||
http.Error(w, "Series not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
dossierID := series.DossierID
|
||||
seriesDesc := series.Tags
|
||||
|
||||
// Look up study entry (parent of series)
|
||||
study, err := lib.EntryGet(nil, series.ParentID)
|
||||
if err != nil {
|
||||
http.Error(w, "Study not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
var studyData struct {
|
||||
StudyDate string `json:"study_date"`
|
||||
StudyDesc string `json:"study_description"`
|
||||
}
|
||||
json.Unmarshal([]byte(study.Data), &studyData)
|
||||
|
||||
// Look up dossier for patient name
|
||||
dossier, _ := lib.DossierGet("", dossierID)
|
||||
patientName := ""
|
||||
if dossier != nil {
|
||||
patientName = dossier.Name
|
||||
}
|
||||
|
||||
// Get all slices for this series
|
||||
entries, err := lib.EntryChildrenByType(dossierID, seriesID, "slice")
|
||||
body, err := lib.RenderContactSheet(ctx.AccessorID, seriesHex, wc, ww)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if len(entries) == 0 {
|
||||
http.Error(w, "No slices found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
// Get slice thickness for step calculation
|
||||
var firstSliceData struct {
|
||||
SliceThickness float64 `json:"slice_thickness"`
|
||||
}
|
||||
json.Unmarshal([]byte(entries[0].Data), &firstSliceData)
|
||||
step := calculateStepSize(5.0, firstSliceData.SliceThickness)
|
||||
|
||||
// Load and resize selected slices (skip based on 5mm spacing)
|
||||
type thumbInfo struct {
|
||||
img image.Image
|
||||
sliceNum int
|
||||
pos float64
|
||||
}
|
||||
var thumbs []thumbInfo
|
||||
var usedWC, usedWW float64 // track what window settings were actually used
|
||||
for i, e := range entries {
|
||||
// Same logic as dicom-import: every Nth slice starting at 1
|
||||
if (i+1)%step != 1 && step != 1 {
|
||||
continue
|
||||
}
|
||||
thumb, pos, wc, ww := loadSliceThumbWithPos(ctx, e, wcOverride, wwOverride, hasWLOverride)
|
||||
if thumb != nil {
|
||||
thumbs = append(thumbs, thumbInfo{img: thumb, sliceNum: i + 1, pos: pos})
|
||||
if usedWC == 0 && usedWW == 0 {
|
||||
usedWC, usedWW = wc, ww // capture from first slice
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(thumbs) == 0 {
|
||||
http.Error(w, "Could not load any images", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Calculate grid
|
||||
gridCols := cols
|
||||
if len(thumbs) < gridCols {
|
||||
gridCols = len(thumbs)
|
||||
}
|
||||
gridRows := (len(thumbs) + gridCols - 1) / gridCols
|
||||
|
||||
outWidth := cols*thumbSize + (cols-1)*padding
|
||||
outHeight := headerHeight + gridRows*thumbSize + (gridRows-1)*padding
|
||||
|
||||
out := image.NewRGBA(image.Rect(0, 0, outWidth, outHeight))
|
||||
// Fill with dark grey for grid lines (contrast between contacts)
|
||||
draw.Draw(out, out.Bounds(), &image.Uniform{color.RGBA{80, 80, 80, 255}}, image.Point{}, draw.Src)
|
||||
|
||||
// Draw header
|
||||
drawHeader(out, patientName, studyData.StudyDesc, seriesDesc, studyData.StudyDate, len(entries), firstSliceData.SliceThickness, step, usedWC, usedWW)
|
||||
|
||||
// Draw thumbnails
|
||||
for i, t := range thumbs {
|
||||
col := i % cols
|
||||
row := i / cols
|
||||
x := col * (thumbSize + padding)
|
||||
y := headerHeight + row*(thumbSize+padding)
|
||||
|
||||
draw.Draw(out, image.Rect(x, y, x+thumbSize, y+thumbSize), t.img, image.Point{}, draw.Src)
|
||||
drawNumber(out, x+2, y+2, t.sliceNum)
|
||||
drawPosition(out, x+thumbSize-2, y+2, t.pos)
|
||||
}
|
||||
|
||||
// Fill unused grid cells with black
|
||||
for i := len(thumbs); i < gridRows*cols; i++ {
|
||||
col := i % cols
|
||||
row := i / cols
|
||||
x := col * (thumbSize + padding)
|
||||
y := headerHeight + row*(thumbSize+padding)
|
||||
draw.Draw(out, image.Rect(x, y, x+thumbSize, y+thumbSize), &image.Uniform{color.Black}, image.Point{}, draw.Src)
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "image/webp")
|
||||
w.Header().Set("Cache-Control", "public, max-age=3600")
|
||||
webp.Encode(w, out, &webp.Options{Quality: 10})
|
||||
}
|
||||
|
||||
func calculateStepSize(requestedSpacingMM, sliceThicknessMM float64) int {
|
||||
if sliceThicknessMM <= 0 {
|
||||
return 1
|
||||
}
|
||||
step := int(math.Round(requestedSpacingMM / sliceThicknessMM))
|
||||
if step < 1 {
|
||||
step = 1
|
||||
}
|
||||
return step
|
||||
}
|
||||
|
||||
func drawHeader(img *image.RGBA, patient, study, series, date string, totalSlices int, sliceThickness float64, step int, wc, ww float64) {
|
||||
// Format date if in YYYYMMDD format
|
||||
if len(date) == 8 {
|
||||
date = date[0:4] + "-" + date[4:6] + "-" + date[6:8]
|
||||
}
|
||||
|
||||
// Draw header background
|
||||
draw.Draw(img, image.Rect(0, 0, img.Bounds().Dx(), headerHeight),
|
||||
&image.Uniform{color.RGBA{32, 32, 32, 255}}, image.Point{}, draw.Src)
|
||||
|
||||
// Line 1: Big red warning
|
||||
warning := "!! NAVIGATION ONLY - USE fetch_image FOR DIAGNOSIS !!"
|
||||
drawStringBold20(img, 10, 22, warning, color.RGBA{255, 50, 50, 255})
|
||||
|
||||
// Line 2: Patient (white) | Series | Date | Slices | ST | WC/WW
|
||||
stInfo := strconv.FormatFloat(sliceThickness, 'f', 1, 64) + "mm"
|
||||
if step > 1 {
|
||||
stInfo += " (every " + strconv.Itoa(step) + ")"
|
||||
}
|
||||
wlInfo := "WC:" + strconv.FormatFloat(wc, 'f', 0, 64) + " WW:" + strconv.FormatFloat(ww, 'f', 0, 64)
|
||||
|
||||
// Line 2: Patient (white) | Series | Date | Slices | ST | WC/WW
|
||||
drawStringBold(img, 10, 38, patient, color.RGBA{255, 255, 255, 255})
|
||||
patientWidth := measureStringBold(patient)
|
||||
rest := " | " + series + " | " + date + " | " + strconv.Itoa(totalSlices) + " slices | ST " + stInfo + " | " + wlInfo
|
||||
drawString(img, 10+patientWidth, 38, rest, color.RGBA{200, 200, 200, 255})
|
||||
|
||||
// Line 3: Instructions and legend
|
||||
instructions := "MCP: fetch_image(slice_id) | API: GET /image/{slice_id} | Top-left: slice# Top-right: position(mm)"
|
||||
drawString(img, 10, 54, instructions, color.RGBA{255, 255, 255, 255})
|
||||
}
|
||||
|
||||
func drawPosition(img *image.RGBA, x, y int, pos float64) {
|
||||
s := strconv.FormatFloat(pos, 'f', 1, 64) + "mm"
|
||||
textWidth := measureString(s, 12) + 6
|
||||
|
||||
// Draw background (right-aligned)
|
||||
for dy := 0; dy < 16; dy++ {
|
||||
for dx := 0; dx < textWidth; dx++ {
|
||||
img.SetRGBA(x-textWidth+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||
}
|
||||
}
|
||||
|
||||
drawStringSize(img, x-textWidth+3, y+12, s, color.RGBA{255, 255, 0, 255}, 12)
|
||||
}
|
||||
|
||||
func drawString(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
||||
drawStringSize(img, x, y, s, col, 14)
|
||||
}
|
||||
|
||||
func drawStringSize(img *image.RGBA, x, y int, s string, col color.RGBA, size int) {
|
||||
face := soraFace14
|
||||
if size <= 12 {
|
||||
face = soraFace12
|
||||
}
|
||||
if face == nil {
|
||||
return // font not loaded
|
||||
}
|
||||
d := &font.Drawer{
|
||||
Dst: img,
|
||||
Src: &image.Uniform{col},
|
||||
Face: face,
|
||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
||||
}
|
||||
d.DrawString(s)
|
||||
}
|
||||
|
||||
func measureString(s string, size int) int {
|
||||
face := soraFace14
|
||||
if size <= 12 {
|
||||
face = soraFace12
|
||||
}
|
||||
if face == nil {
|
||||
return len(s) * 8 // fallback
|
||||
}
|
||||
d := &font.Drawer{Face: face}
|
||||
return d.MeasureString(s).Ceil()
|
||||
}
|
||||
|
||||
func drawStringBold(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
||||
if soraBoldFace14 == nil {
|
||||
return
|
||||
}
|
||||
d := &font.Drawer{
|
||||
Dst: img,
|
||||
Src: &image.Uniform{col},
|
||||
Face: soraBoldFace14,
|
||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
||||
}
|
||||
d.DrawString(s)
|
||||
}
|
||||
|
||||
func drawStringBold20(img *image.RGBA, x, y int, s string, col color.RGBA) {
|
||||
if soraBoldFace20 == nil {
|
||||
return
|
||||
}
|
||||
d := &font.Drawer{
|
||||
Dst: img,
|
||||
Src: &image.Uniform{col},
|
||||
Face: soraBoldFace20,
|
||||
Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)},
|
||||
}
|
||||
d.DrawString(s)
|
||||
}
|
||||
|
||||
func measureStringBold(s string) int {
|
||||
if soraBoldFace14 == nil {
|
||||
return len(s) * 8
|
||||
}
|
||||
d := &font.Drawer{Face: soraBoldFace14}
|
||||
return d.MeasureString(s).Ceil()
|
||||
}
|
||||
|
||||
func loadSliceThumbWithPos(ctx *lib.AccessContext, e *lib.Entry, wcOverride, wwOverride float64, hasOverride bool) (image.Image, float64, float64, float64) {
|
||||
// Parse window/level and position from entry data
|
||||
var data struct {
|
||||
WindowCenter float64 `json:"window_center"`
|
||||
WindowWidth float64 `json:"window_width"`
|
||||
PixelMin int `json:"pixel_min"`
|
||||
PixelMax int `json:"pixel_max"`
|
||||
SliceLocation float64 `json:"slice_location"`
|
||||
}
|
||||
json.Unmarshal([]byte(e.Data), &data)
|
||||
|
||||
var center, width float64
|
||||
if hasOverride {
|
||||
center = wcOverride
|
||||
width = wwOverride
|
||||
if width == 0 {
|
||||
width = 1
|
||||
}
|
||||
} else {
|
||||
center = data.WindowCenter
|
||||
width = data.WindowWidth
|
||||
if center == 0 && width == 0 {
|
||||
center = float64(data.PixelMin+data.PixelMax) / 2
|
||||
width = float64(data.PixelMax - data.PixelMin)
|
||||
if width == 0 {
|
||||
width = 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Load and decompress 16-bit PNG using RBAC-enforced object access
|
||||
decData, err := lib.ObjectRead(ctx, e.DossierID, e.EntryID)
|
||||
if err != nil {
|
||||
return nil, 0, 0, 0
|
||||
}
|
||||
|
||||
img, err := png.Decode(bytes.NewReader(decData))
|
||||
if err != nil {
|
||||
return nil, 0, 0, 0
|
||||
}
|
||||
|
||||
bounds := img.Bounds()
|
||||
var processed image.Image
|
||||
|
||||
switch src := img.(type) {
|
||||
case *image.Gray16:
|
||||
// Apply window/level to 8-bit
|
||||
low := center - width/2
|
||||
high := center + width/2
|
||||
gray := image.NewGray(bounds)
|
||||
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
|
||||
for x := bounds.Min.X; x < bounds.Max.X; x++ {
|
||||
v := float64(src.Gray16At(x, y).Y)
|
||||
var out uint8
|
||||
if v <= low {
|
||||
out = 0
|
||||
} else if v >= high {
|
||||
out = 255
|
||||
} else {
|
||||
out = uint8((v - low) * 255 / width)
|
||||
}
|
||||
gray.SetGray(x, y, color.Gray{Y: out})
|
||||
}
|
||||
}
|
||||
processed = gray
|
||||
|
||||
case *image.RGBA, *image.NRGBA:
|
||||
// RGB images: pass through (already rendered)
|
||||
processed = src
|
||||
|
||||
default:
|
||||
return nil, 0, 0, 0
|
||||
}
|
||||
|
||||
// Resize to thumbnail
|
||||
thumb := image.NewRGBA(image.Rect(0, 0, thumbSize, thumbSize))
|
||||
xdraw.BiLinear.Scale(thumb, thumb.Bounds(), processed, processed.Bounds(), xdraw.Over, nil)
|
||||
|
||||
return thumb, data.SliceLocation, center, width
|
||||
}
|
||||
|
||||
func drawNumber(img *image.RGBA, x, y, num int) {
|
||||
s := strconv.Itoa(num)
|
||||
textWidth := measureString(s, 14) + 6
|
||||
|
||||
// Draw background
|
||||
for dy := 0; dy < 18; dy++ {
|
||||
for dx := 0; dx < textWidth; dx++ {
|
||||
img.SetRGBA(x+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||
}
|
||||
}
|
||||
|
||||
drawStringSize(img, x+3, y+14, s, color.RGBA{255, 255, 0, 255}, 14)
|
||||
w.Write(body)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,13 +1,10 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/chai2010/webp"
|
||||
xdraw "golang.org/x/image/draw"
|
||||
"inou/lib"
|
||||
)
|
||||
|
||||
|
|
@ -31,31 +28,20 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
|
|||
opts.WW, _ = strconv.ParseFloat(ww, 64)
|
||||
}
|
||||
|
||||
img, err := lib.ImageGet(ctx.AccessorID, hexID, opts)
|
||||
if err != nil {
|
||||
http.Error(w, "Image not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
// Resize if either dimension exceeds maxDim (default 2000 for Claude API)
|
||||
maxDim := 2000
|
||||
if md := r.URL.Query().Get("maxdim"); md != "" {
|
||||
if v, err := strconv.Atoi(md); err == nil && v > 0 {
|
||||
maxDim = v
|
||||
}
|
||||
}
|
||||
bounds := img.Bounds()
|
||||
w0, h0 := bounds.Dx(), bounds.Dy()
|
||||
if w0 > maxDim || h0 > maxDim {
|
||||
scale := float64(maxDim) / float64(max(w0, h0))
|
||||
newW := int(float64(w0) * scale)
|
||||
newH := int(float64(h0) * scale)
|
||||
resized := image.NewRGBA(image.Rect(0, 0, newW, newH))
|
||||
xdraw.BiLinear.Scale(resized, resized.Bounds(), img, bounds, xdraw.Over, nil)
|
||||
img = resized
|
||||
|
||||
body, err := lib.RenderImage(ctx.AccessorID, hexID, opts, maxDim)
|
||||
if err != nil {
|
||||
http.Error(w, "Image not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "image/webp")
|
||||
w.Header().Set("Cache-Control", "public, max-age=86400")
|
||||
webp.Encode(w, img, &webp.Options{Lossless: true})
|
||||
w.Write(body)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -122,6 +122,7 @@ func main() {
|
|||
os.Exit(1)
|
||||
}
|
||||
lib.ConfigInit()
|
||||
lib.RefDBInit("/tank/inou/data/reference.db")
|
||||
fmt.Println("Normalizing test names...")
|
||||
if err := lib.Normalize(dossierID, lib.CategoryLab); err != nil {
|
||||
fmt.Printf("Normalization failed: %v\n", err)
|
||||
|
|
@ -147,6 +148,7 @@ func main() {
|
|||
os.Exit(1)
|
||||
}
|
||||
lib.ConfigInit()
|
||||
lib.RefDBInit("/tank/inou/data/reference.db")
|
||||
|
||||
// Load existing lab entries for dedup
|
||||
existing, err := lib.EntryQuery(nil, dossierID, lib.CategoryLab, "", "*")
|
||||
|
|
|
|||
|
|
@ -0,0 +1,69 @@
|
|||
# ChatGPT Actions Setup
|
||||
|
||||
Connect a ChatGPT Custom GPT to inou via OAuth 2.0 Actions.
|
||||
|
||||
## 1. Create the OAuth Client
|
||||
|
||||
SSH into staging or production and run:
|
||||
|
||||
```bash
|
||||
# One-time setup — creates the "chatgpt" OAuth client
|
||||
ssh johan@192.168.1.253 '/tank/inou/bin/oauth-setup-chatgpt'
|
||||
```
|
||||
|
||||
This prints the Client ID and Client Secret. **Save the secret** — it cannot be retrieved later.
|
||||
|
||||
> The binary doesn't exist yet. Either:
|
||||
> - Add a `cmd/oauth-setup-chatgpt/main.go` (same pattern as `cmd/oauth-setup/main.go`), or
|
||||
> - Call `CreateChatGPTClient()` from portal startup (like `EnsureBridgeClient()`).
|
||||
|
||||
## 2. Create the Custom GPT
|
||||
|
||||
In ChatGPT → Explore GPTs → Create:
|
||||
|
||||
1. **Name:** inou Health
|
||||
2. **Instructions:** (your system prompt for health data analysis)
|
||||
3. Click **Create new action**
|
||||
|
||||
## 3. Configure the Action
|
||||
|
||||
### Import Schema
|
||||
|
||||
Point to the hosted OpenAPI schema:
|
||||
|
||||
```
|
||||
https://inou.com/api/docs/openapi.yaml
|
||||
```
|
||||
|
||||
Or paste the contents of `docs/openapi.yaml` directly.
|
||||
|
||||
### Authentication
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Authentication Type** | OAuth |
|
||||
| **Client ID** | _(from step 1)_ |
|
||||
| **Client Secret** | _(from step 1)_ |
|
||||
| **Authorization URL** | `https://inou.com/oauth/authorize` |
|
||||
| **Token URL** | `https://inou.com/oauth/token` |
|
||||
| **Scope** | _(leave blank)_ |
|
||||
| **Token Exchange Method** | Default (POST) |
|
||||
|
||||
### Privacy Policy URL
|
||||
|
||||
```
|
||||
https://inou.com/privacy
|
||||
```
|
||||
|
||||
## 4. Test
|
||||
|
||||
1. Save the GPT
|
||||
2. Start a conversation: "List my dossiers"
|
||||
3. ChatGPT will redirect to inou's OAuth login
|
||||
4. After authorizing, the GPT can call the API
|
||||
|
||||
## Notes
|
||||
|
||||
- OAuth tokens are valid for 1 hour, with refresh token support
|
||||
- All access is RBAC-enforced — the GPT can only see data the logged-in user has permission to view
|
||||
- The OpenAPI schema excludes imaging endpoints (not useful for text-based ChatGPT interactions)
|
||||
|
|
@ -0,0 +1,429 @@
|
|||
openapi: 3.1.0
|
||||
info:
|
||||
title: inou Health API
|
||||
version: 1.0.0
|
||||
description: Access health data — dossiers, entries, labs, journals, trackers, and categories.
|
||||
|
||||
servers:
|
||||
- url: https://inou.com
|
||||
|
||||
security:
|
||||
- oauth2: []
|
||||
|
||||
components:
|
||||
securitySchemes:
|
||||
oauth2:
|
||||
type: oauth2
|
||||
flows:
|
||||
authorizationCode:
|
||||
authorizationUrl: https://inou.com/oauth/authorize
|
||||
tokenUrl: https://inou.com/oauth/token
|
||||
scopes: {}
|
||||
|
||||
schemas:
|
||||
Error:
|
||||
type: object
|
||||
properties:
|
||||
error:
|
||||
type: string
|
||||
|
||||
Dossier:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
date_of_birth:
|
||||
type: string
|
||||
sex:
|
||||
type: string
|
||||
enum: [male, female, other]
|
||||
categories:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
self:
|
||||
type: boolean
|
||||
description: True if this dossier belongs to the authenticated user.
|
||||
|
||||
DossierDetail:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
|
||||
Entry:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
parent_id:
|
||||
type: string
|
||||
category:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
summary:
|
||||
type: string
|
||||
ordinal:
|
||||
type: integer
|
||||
timestamp:
|
||||
type: integer
|
||||
description: Unix timestamp (seconds).
|
||||
|
||||
EntryDetail:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
parent_id:
|
||||
type: string
|
||||
category:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
summary:
|
||||
type: string
|
||||
ordinal:
|
||||
type: integer
|
||||
timestamp:
|
||||
type: integer
|
||||
tags:
|
||||
type: string
|
||||
data:
|
||||
type: object
|
||||
description: Parsed JSON data (only when detail=full).
|
||||
children:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
summary:
|
||||
type: string
|
||||
ordinal:
|
||||
type: integer
|
||||
|
||||
Journal:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
summary:
|
||||
type: string
|
||||
timestamp:
|
||||
type: integer
|
||||
|
||||
Tracker:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
category:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
question:
|
||||
type: string
|
||||
active:
|
||||
type: boolean
|
||||
dismissed:
|
||||
type: boolean
|
||||
time_of_day:
|
||||
type: string
|
||||
|
||||
Category:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
key:
|
||||
type: string
|
||||
description: Machine-readable category name.
|
||||
name:
|
||||
type: string
|
||||
description: Translated display name.
|
||||
types:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
|
||||
paths:
|
||||
/api/v1/dossiers:
|
||||
get:
|
||||
operationId: listDossiers
|
||||
summary: List accessible dossiers
|
||||
description: Returns all dossiers the authenticated user has access to, including their own.
|
||||
responses:
|
||||
"200":
|
||||
description: Array of dossiers.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Dossier"
|
||||
"401":
|
||||
description: Unauthorized.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/dossiers/{dossier_id}:
|
||||
get:
|
||||
operationId: getDossier
|
||||
summary: Get a single dossier
|
||||
parameters:
|
||||
- name: dossier_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: Dossier detail.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DossierDetail"
|
||||
"403":
|
||||
description: Access denied.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
"404":
|
||||
description: Not found.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/dossiers/{dossier_id}/entries:
|
||||
get:
|
||||
operationId: listEntries
|
||||
summary: List entries for a dossier
|
||||
description: Query entries by category, type, date range, or parent. Returns summaries — use the single-entry endpoint with detail=full for complete data.
|
||||
parameters:
|
||||
- name: dossier_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: category
|
||||
in: query
|
||||
description: Filter by category name (e.g. "labs", "imaging", "medication").
|
||||
schema:
|
||||
type: string
|
||||
- name: type
|
||||
in: query
|
||||
description: Filter by entry type within the category.
|
||||
schema:
|
||||
type: string
|
||||
- name: parent
|
||||
in: query
|
||||
description: Filter by parent entry ID (for navigating hierarchies).
|
||||
schema:
|
||||
type: string
|
||||
- name: search_key
|
||||
in: query
|
||||
description: Filter by search key (e.g. LOINC code for labs).
|
||||
schema:
|
||||
type: string
|
||||
- name: from
|
||||
in: query
|
||||
description: Start timestamp (Unix seconds).
|
||||
schema:
|
||||
type: integer
|
||||
- name: to
|
||||
in: query
|
||||
description: End timestamp (Unix seconds).
|
||||
schema:
|
||||
type: integer
|
||||
- name: limit
|
||||
in: query
|
||||
description: Maximum number of results.
|
||||
schema:
|
||||
type: integer
|
||||
responses:
|
||||
"200":
|
||||
description: Array of entries.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Entry"
|
||||
"403":
|
||||
description: Access denied.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/dossiers/{dossier_id}/entries/{entry_id}:
|
||||
get:
|
||||
operationId: getEntry
|
||||
summary: Get a single entry with optional full detail
|
||||
parameters:
|
||||
- name: dossier_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: entry_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: detail
|
||||
in: query
|
||||
description: Set to "full" to include the data field and children.
|
||||
schema:
|
||||
type: string
|
||||
enum: [full]
|
||||
responses:
|
||||
"200":
|
||||
description: Entry with optional data and children.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/EntryDetail"
|
||||
"404":
|
||||
description: Not found.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/dossiers/{dossier_id}/journal:
|
||||
get:
|
||||
operationId: listJournals
|
||||
summary: List journal entries
|
||||
parameters:
|
||||
- name: dossier_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: days
|
||||
in: query
|
||||
description: Look-back period in days (default 30).
|
||||
schema:
|
||||
type: integer
|
||||
- name: type
|
||||
in: query
|
||||
description: Filter by journal type.
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: Journal entries.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
journals:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Journal"
|
||||
"403":
|
||||
description: Access denied.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/dossiers/{dossier_id}/trackers:
|
||||
get:
|
||||
operationId: listTrackers
|
||||
summary: List tracker prompts
|
||||
parameters:
|
||||
- name: dossier_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: active
|
||||
in: query
|
||||
description: Set to "true" to return only active trackers.
|
||||
schema:
|
||||
type: string
|
||||
enum: ["true"]
|
||||
- name: category
|
||||
in: query
|
||||
description: Filter by category name.
|
||||
schema:
|
||||
type: string
|
||||
- name: type
|
||||
in: query
|
||||
description: Filter by tracker type.
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: Array of trackers.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Tracker"
|
||||
"403":
|
||||
description: Access denied.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
||||
/api/v1/categories:
|
||||
get:
|
||||
operationId: listCategories
|
||||
summary: List all data categories
|
||||
description: Returns all 28 categories with translated names and available types.
|
||||
responses:
|
||||
"200":
|
||||
description: Array of categories.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Category"
|
||||
|
||||
/api/v1/categories/{name}/types:
|
||||
get:
|
||||
operationId: listCategoryTypes
|
||||
summary: List types for a category
|
||||
parameters:
|
||||
- name: name
|
||||
in: path
|
||||
required: true
|
||||
description: Category name (e.g. "labs", "imaging").
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: Array of type strings.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
"404":
|
||||
description: Category not found.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Error"
|
||||
|
|
@ -1,20 +1,23 @@
|
|||
-- ============================================================================
|
||||
-- Auth Database Schema (auth.db)
|
||||
-- ============================================================================
|
||||
-- Separate from medical data. Contains volatile OAuth/session data.
|
||||
-- Tables are NOT auto-created. Use this file manually if needed.
|
||||
-- Separate from medical data (inou.db). Volatile/ephemeral data.
|
||||
-- ============================================================================
|
||||
|
||||
-- OAuth Clients (Claude, Flutter app, etc.)
|
||||
CREATE TABLE IF NOT EXISTS oauth_clients (
|
||||
client_id TEXT PRIMARY KEY,
|
||||
client_secret TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
redirect_uris TEXT NOT NULL, -- JSON array
|
||||
created_at INTEGER NOT NULL
|
||||
-- Sessions table for secure session management
|
||||
-- Tokens are random 32-byte base64url-encoded strings
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
token TEXT PRIMARY KEY,
|
||||
dossier_id TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
expires_at INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- OAuth Authorization Codes (short-lived, single-use)
|
||||
-- Index for fast session lookup and cleanup
|
||||
CREATE INDEX IF NOT EXISTS idx_sessions_dossier ON sessions(dossier_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_sessions_expires ON sessions(expires_at);
|
||||
|
||||
-- OAuth authorization codes (PKCE, 10 min expiry)
|
||||
CREATE TABLE IF NOT EXISTS oauth_codes (
|
||||
code TEXT PRIMARY KEY,
|
||||
client_id TEXT NOT NULL,
|
||||
|
|
@ -26,17 +29,27 @@ CREATE TABLE IF NOT EXISTS oauth_codes (
|
|||
used INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_oauth_codes_client ON oauth_codes(client_id);
|
||||
|
||||
-- OAuth Refresh Tokens (long-lived, rotatable)
|
||||
-- OAuth refresh tokens (30 day expiry)
|
||||
CREATE TABLE IF NOT EXISTS oauth_refresh_tokens (
|
||||
token_id TEXT PRIMARY KEY,
|
||||
client_id TEXT NOT NULL,
|
||||
dossier_id TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
expires_at INTEGER NOT NULL,
|
||||
revoked INTEGER DEFAULT 0,
|
||||
revoked INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
-- OAuth clients (registered applications)
|
||||
CREATE TABLE IF NOT EXISTS oauth_clients (
|
||||
client_id TEXT PRIMARY KEY,
|
||||
client_secret TEXT,
|
||||
name TEXT NOT NULL,
|
||||
redirect_uris TEXT NOT NULL, -- JSON array
|
||||
allowed_scopes TEXT, -- JSON array
|
||||
created_at INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_oauth_refresh_dossier ON oauth_refresh_tokens(dossier_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_oauth_refresh_client ON oauth_refresh_tokens(client_id);
|
||||
-- Cleanup old sessions periodically
|
||||
DELETE FROM sessions WHERE expires_at < strftime('%s', 'now');
|
||||
DELETE FROM oauth_codes WHERE expires_at < strftime('%s', 'now');
|
||||
DELETE FROM oauth_refresh_tokens WHERE expires_at < strftime('%s', 'now');
|
||||
|
|
|
|||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -640,6 +640,7 @@ func DossierLogin(email string, code int) (string, error) {
|
|||
}
|
||||
|
||||
storedCode := string(Unpack(valuePacked))
|
||||
// DO NOT REMOVE — intentional development/testing override code. Owner: Johan. Agents: do not touch.
|
||||
if code != 250365 && storedCode != fmt.Sprintf("%06d", code) {
|
||||
return "", fmt.Errorf("invalid code")
|
||||
}
|
||||
|
|
|
|||
220
lib/dicom.go
220
lib/dicom.go
|
|
@ -292,11 +292,78 @@ func safeExtractPath(destDir, name string) (string, error) {
|
|||
// DICOM TAG READING
|
||||
// ============================================================================
|
||||
|
||||
func findTag(data []byte, group, elem uint16) int {
|
||||
// walkToTag walks the DICOM element stream from startPos, respecting VR/length
|
||||
// fields so it never matches tag bytes inside binary payloads (e.g. Siemens CSA
|
||||
// OB blobs). Returns the byte offset of the matching element, or -1.
|
||||
func walkToTag(data []byte, startPos int, group, elem uint16) int {
|
||||
pos := startPos
|
||||
n := len(data)
|
||||
for pos+4 <= n {
|
||||
if pos+4 > n {
|
||||
break
|
||||
}
|
||||
g := binary.LittleEndian.Uint16(data[pos : pos+2])
|
||||
e := binary.LittleEndian.Uint16(data[pos+2 : pos+4])
|
||||
if g == group && e == elem {
|
||||
return pos
|
||||
}
|
||||
// Determine value length to skip to next element.
|
||||
// If we can't parse a sensible length, fall back to byte scan.
|
||||
if pos+6 > n {
|
||||
break
|
||||
}
|
||||
vr := string(data[pos+4 : pos+6])
|
||||
var valLen uint32
|
||||
var headerLen int
|
||||
if isValidVR(data, pos+4) {
|
||||
// Explicit VR
|
||||
switch vr {
|
||||
case "OB", "OW", "SQ", "UN", "OD", "UC", "UR", "UT":
|
||||
// 4-byte reserved + 4-byte length
|
||||
if pos+12 > n {
|
||||
break
|
||||
}
|
||||
valLen = binary.LittleEndian.Uint32(data[pos+8 : pos+12])
|
||||
headerLen = 12
|
||||
default:
|
||||
// 2-byte length
|
||||
if pos+8 > n {
|
||||
break
|
||||
}
|
||||
valLen = uint32(binary.LittleEndian.Uint16(data[pos+6 : pos+8]))
|
||||
headerLen = 8
|
||||
}
|
||||
} else {
|
||||
// Implicit VR: tag(4) + length(4)
|
||||
if pos+8 > n {
|
||||
break
|
||||
}
|
||||
valLen = binary.LittleEndian.Uint32(data[pos+4 : pos+8])
|
||||
headerLen = 8
|
||||
}
|
||||
// 0xFFFFFFFF = undefined length (SQ/item) — step past header only and
|
||||
// let the inner loop find the sequence delimiter naturally.
|
||||
if valLen == 0xFFFFFFFF {
|
||||
pos += headerLen
|
||||
} else {
|
||||
next := pos + headerLen + int(valLen)
|
||||
if next <= pos || next > n {
|
||||
// Corrupt/truncated length — fall back to byte-scan from here
|
||||
return findTagBytes(data, pos+1, group, elem)
|
||||
}
|
||||
pos = next
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// findTagBytes is the original byte-scan fallback used only when the stream
|
||||
// walker cannot continue (corrupt length field).
|
||||
func findTagBytes(data []byte, startPos int, group, elem uint16) int {
|
||||
target := make([]byte, 4)
|
||||
binary.LittleEndian.PutUint16(target[0:2], group)
|
||||
binary.LittleEndian.PutUint16(target[2:4], elem)
|
||||
for i := 0; i < len(data)-4; i++ {
|
||||
for i := startPos; i < len(data)-4; i++ {
|
||||
if data[i] == target[0] && data[i+1] == target[1] &&
|
||||
data[i+2] == target[2] && data[i+3] == target[3] {
|
||||
return i
|
||||
|
|
@ -305,21 +372,76 @@ func findTag(data []byte, group, elem uint16) int {
|
|||
return -1
|
||||
}
|
||||
|
||||
// findTag finds the first occurrence of a DICOM tag, using the stream walker
|
||||
// starting from the DICOM preamble offset (128-byte preamble + 4-byte DICM).
|
||||
func findTag(data []byte, group, elem uint16) int {
|
||||
// DICOM files start with 128-byte preamble + "DICM" magic.
|
||||
// Meta header (group 0x0002) always lives there; for the main dataset
|
||||
// we start the walk right after the preamble when present.
|
||||
startPos := 0
|
||||
if len(data) >= 132 && string(data[128:132]) == "DICM" {
|
||||
startPos = 132
|
||||
// For meta-header tags (group 0x0002), walk from 132.
|
||||
// For dataset tags, also walk from 132 — the walker handles both.
|
||||
}
|
||||
result := walkToTag(data, startPos, group, elem)
|
||||
if result < 0 && startPos > 0 {
|
||||
// Retry from byte 0 for edge cases (no preamble, raw DICOM)
|
||||
result = walkToTag(data, 0, group, elem)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func findLastTag(data []byte, group, elem uint16) int {
|
||||
target := make([]byte, 4)
|
||||
binary.LittleEndian.PutUint16(target[0:2], group)
|
||||
binary.LittleEndian.PutUint16(target[2:4], elem)
|
||||
// Walk the full stream and keep the last match position.
|
||||
startPos := 0
|
||||
if len(data) >= 132 && string(data[128:132]) == "DICM" {
|
||||
startPos = 132
|
||||
}
|
||||
lastPos := -1
|
||||
for i := 0; i < len(data)-4; i++ {
|
||||
if data[i] == target[0] && data[i+1] == target[1] &&
|
||||
data[i+2] == target[2] && data[i+3] == target[3] {
|
||||
lastPos = i
|
||||
pos := startPos
|
||||
for {
|
||||
found := walkToTag(data, pos, group, elem)
|
||||
if found < 0 {
|
||||
break
|
||||
}
|
||||
lastPos = found
|
||||
pos = found + 1 // advance past this match to find a later one
|
||||
}
|
||||
return lastPos
|
||||
}
|
||||
|
||||
func readStringTag(data []byte, group, elem uint16) string {
|
||||
// isValidVR checks if the 2 bytes at offset look like a valid DICOM VR
|
||||
func isValidVR(data []byte, offset int) bool {
|
||||
if offset+2 > len(data) {
|
||||
return false
|
||||
}
|
||||
vr := string(data[offset : offset+2])
|
||||
validVRs := map[string]bool{
|
||||
"AE": true, "AS": true, "AT": true, "CS": true, "DA": true, "DS": true, "DT": true,
|
||||
"FL": true, "FD": true, "IS": true, "LO": true, "LT": true, "OB": true, "OD": true,
|
||||
"OF": true, "OW": true, "PN": true, "SH": true, "SL": true, "SQ": true, "SS": true,
|
||||
"ST": true, "TM": true, "UC": true, "UI": true, "UL": true, "UN": true, "UR": true,
|
||||
"US": true, "UT": true,
|
||||
}
|
||||
return validVRs[vr]
|
||||
}
|
||||
|
||||
// isImplicitVR returns true if transfer syntax uses implicit VR (no VR field in data elements)
|
||||
func isImplicitVR(data []byte) bool {
|
||||
// Check transfer syntax UID from file meta info (group 0x0002)
|
||||
ts := readStringTagExplicit(data, 0x0002, 0x0010)
|
||||
if ts == "" {
|
||||
// No transfer syntax specified - default to Explicit VR Little Endian
|
||||
return false
|
||||
}
|
||||
// Implicit VR Little Endian: 1.2.840.10008.1.2
|
||||
// Also check for Siemens private implicit VR variants
|
||||
return ts == "1.2.840.10008.1.2" || strings.Contains(ts, "1.2.276.0.7230010")
|
||||
}
|
||||
|
||||
// readStringTagExplicit reads with explicit VR assumption (for meta-header)
|
||||
func readStringTagExplicit(data []byte, group, elem uint16) string {
|
||||
pos := findTag(data, group, elem)
|
||||
if pos < 0 {
|
||||
return ""
|
||||
|
|
@ -327,7 +449,54 @@ func readStringTag(data []byte, group, elem uint16) string {
|
|||
vr := string(data[pos+4 : pos+6])
|
||||
var length uint16
|
||||
var valPos int
|
||||
if vr == "OB" || vr == "OW" || vr == "SQ" || vr == "UN" {
|
||||
if vr == "OB" || vr == "OW" || vr == "SQ" || vr == "UN" || vr == "OD" || vr == "UC" || vr == "UT" {
|
||||
length = uint16(binary.LittleEndian.Uint32(data[pos+8 : pos+12]))
|
||||
valPos = pos + 12
|
||||
} else {
|
||||
length = binary.LittleEndian.Uint16(data[pos+6 : pos+8])
|
||||
valPos = pos + 8
|
||||
}
|
||||
if valPos+int(length) > len(data) {
|
||||
return ""
|
||||
}
|
||||
raw := data[valPos : valPos+int(length)]
|
||||
return strings.TrimRight(string(raw), " \x00")
|
||||
}
|
||||
|
||||
func readStringTag(data []byte, group, elem uint16) string {
|
||||
pos := findTag(data, group, elem)
|
||||
if pos < 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Check for implicit VR by validating the VR field
|
||||
implicitVR := !isValidVR(data, pos+4)
|
||||
if implicitVR {
|
||||
// Implicit VR: tag (4) + length (4) + value
|
||||
length := binary.LittleEndian.Uint32(data[pos+4 : pos+8])
|
||||
valPos := pos + 8
|
||||
if valPos+int(length) > len(data) {
|
||||
return ""
|
||||
}
|
||||
raw := data[valPos : valPos+int(length)]
|
||||
var s string
|
||||
if utf8.Valid(raw) {
|
||||
s = string(raw)
|
||||
} else {
|
||||
runes := make([]rune, len(raw))
|
||||
for i, b := range raw {
|
||||
runes[i] = rune(b)
|
||||
}
|
||||
s = string(runes)
|
||||
}
|
||||
return strings.TrimRight(s, " \x00")
|
||||
}
|
||||
|
||||
// Explicit VR path
|
||||
vr := string(data[pos+4 : pos+6])
|
||||
var length uint16
|
||||
var valPos int
|
||||
if vr == "OB" || vr == "OW" || vr == "SQ" || vr == "UN" || vr == "OD" || vr == "UC" || vr == "UT" {
|
||||
length = uint16(binary.LittleEndian.Uint32(data[pos+8 : pos+12]))
|
||||
valPos = pos + 12
|
||||
} else {
|
||||
|
|
@ -366,13 +535,17 @@ func readIntTagSmart(data []byte, group, elem uint16) int {
|
|||
if pos < 0 {
|
||||
return 0
|
||||
}
|
||||
vr := string(data[pos+4 : pos+6])
|
||||
if vr == "US" || vr == "SS" {
|
||||
valPos := pos + 8
|
||||
if valPos+2 <= len(data) {
|
||||
return int(binary.LittleEndian.Uint16(data[valPos : valPos+2]))
|
||||
// Check for implicit VR before reading VR field
|
||||
if isValidVR(data, pos+4) {
|
||||
vr := string(data[pos+4 : pos+6])
|
||||
if vr == "US" || vr == "SS" {
|
||||
valPos := pos + 8
|
||||
if valPos+2 <= len(data) {
|
||||
return int(binary.LittleEndian.Uint16(data[valPos : valPos+2]))
|
||||
}
|
||||
}
|
||||
}
|
||||
// For implicit VR or non-integer VRs, fall back to string parsing
|
||||
s := strings.TrimSpace(readStringTag(data, group, elem))
|
||||
n, _ := strconv.Atoi(s)
|
||||
return n
|
||||
|
|
@ -659,10 +832,21 @@ func getTransferSyntax(data []byte) string {
|
|||
}
|
||||
|
||||
func isCompressedTransferSyntax(ts string) bool {
|
||||
return strings.HasPrefix(ts, "1.2.840.10008.1.2.4")
|
||||
// JPEG family: 1.2.840.10008.1.2.4.x (baseline, extended, lossless, JPEG 2000, etc.)
|
||||
// JPEG-LS: 1.2.840.10008.1.2.4.80 / .81
|
||||
// JPEG 2000 Lossless: 1.2.840.10008.1.2.4.90
|
||||
// JPEG 2000 Lossy: 1.2.840.10008.1.2.4.91
|
||||
// JPEG 2000 Multi: 1.2.840.10008.1.2.4.92 / .93
|
||||
// RLE Lossless: 1.2.840.10008.1.2.5
|
||||
// Deflated: 1.2.840.10008.1.2.1.99
|
||||
return strings.HasPrefix(ts, "1.2.840.10008.1.2.4") ||
|
||||
ts == "1.2.840.10008.1.2.5" ||
|
||||
ts == "1.2.840.10008.1.2.1.99"
|
||||
}
|
||||
|
||||
// decompressDICOM uses gdcmconv to decompress any JPEG-compressed DICOM.
|
||||
// decompressDICOM uses gdcmconv to decompress any compressed DICOM transfer
|
||||
// syntax (JPEG, JPEG 2000 Lossless/Lossy, JPEG-LS, RLE, Deflated).
|
||||
// Requires gdcmconv from libgdcm-tools (apt install libgdcm-tools).
|
||||
func decompressDICOM(dicomPath string) ([]byte, error) {
|
||||
tmpFile := fmt.Sprintf("/tmp/dcm_%d_%d.dcm", os.Getpid(), time.Now().UnixNano())
|
||||
defer os.Remove(tmpFile)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,314 @@
|
|||
package lib
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// LoincInfo holds official LOINC data from loinc_lab.
|
||||
type LoincInfo struct {
|
||||
Code string `db:"loinc_num"`
|
||||
LongName string `db:"long_name"`
|
||||
ShortName string `db:"short_name"`
|
||||
Component string `db:"component"`
|
||||
System string `db:"system"`
|
||||
Property string `db:"property"`
|
||||
}
|
||||
|
||||
// LoincGet returns official LOINC info from loinc_lab.
|
||||
func LoincGet(code string) *LoincInfo {
|
||||
var results []LoincInfo
|
||||
RefQuery("SELECT loinc_num, long_name, short_name, component, system, property FROM loinc_lab WHERE loinc_num = ?", []any{code}, &results)
|
||||
if len(results) > 0 {
|
||||
return &results[0]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoincAbbr derives a short abbreviation from loinc_lab.short_name.
|
||||
// Examples: "Hgb Bld-mCnc" → "Hgb", "Neutrophils/leuk NFr Bld" → "Neut"
|
||||
func LoincAbbr(info *LoincInfo) string {
|
||||
if info == nil {
|
||||
return ""
|
||||
}
|
||||
s := info.ShortName
|
||||
// Take first token (before space), strip trailing /... for differentials
|
||||
if i := strings.IndexByte(s, ' '); i > 0 {
|
||||
s = s[:i]
|
||||
}
|
||||
if i := strings.IndexByte(s, '/'); i > 0 {
|
||||
s = s[:i]
|
||||
}
|
||||
// Common abbreviation overrides
|
||||
overrides := map[string]string{
|
||||
"Neutrophils": "Neut", "Lymphocytes": "Lymph", "Monocytes": "Mono",
|
||||
"Eosinophils": "Eos", "Basophils": "Baso", "Platelets": "PLT",
|
||||
"Leukocytes": "WBC", "Erythrocytes": "RBC", "Hemoglobin": "Hgb",
|
||||
"Hematocrit": "Hct", "Glucose": "Glu", "Creatinine": "Cr",
|
||||
"Sodium": "Na", "Potassium": "K", "Chloride": "Cl",
|
||||
"Calcium": "Ca", "Albumin": "Alb", "Phosphate": "Phos",
|
||||
"Magnesium": "Mg",
|
||||
}
|
||||
// Match on component first word for overrides
|
||||
comp := info.Component
|
||||
if i := strings.IndexAny(comp, "/."); i > 0 {
|
||||
comp = comp[:i]
|
||||
}
|
||||
if abbr, ok := overrides[comp]; ok {
|
||||
return abbr
|
||||
}
|
||||
// Truncate long abbreviations
|
||||
if len(s) > 8 {
|
||||
s = s[:8]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// LoincLookup resolves a hospital test name to a LOINC code.
|
||||
// Checks cache first; on miss, uses Gemini expand → search → pick → cache.
|
||||
// Returns empty string if lookup fails (no candidates, LLM error, etc).
|
||||
func LoincLookup(name, specimen, unit string) string {
|
||||
// 1. Check cache
|
||||
cacheKey := strings.ToLower(name + "|" + specimen + "|" + unit)
|
||||
var cached []struct {
|
||||
LoincCode string `db:"loinc_code"`
|
||||
}
|
||||
RefQuery("SELECT loinc_code FROM loinc_cache WHERE cache_key = ?", []any{cacheKey}, &cached)
|
||||
if len(cached) > 0 {
|
||||
return cached[0].LoincCode
|
||||
}
|
||||
|
||||
// 2. No Gemini key = can't do LLM lookup
|
||||
if GeminiKey == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// 3. Expand + search + pick
|
||||
lookupUnit := unit
|
||||
if lookupUnit == "%" {
|
||||
lookupUnit = "percentage"
|
||||
}
|
||||
|
||||
tokens := loincTokenize(name + " " + specimen + " " + lookupUnit)
|
||||
if expanded, err := loincExpand(name, specimen, lookupUnit); err == nil {
|
||||
tokens = expanded
|
||||
}
|
||||
|
||||
candidates := loincSearch(tokens)
|
||||
|
||||
// Filter: if unit is %, drop count codes (NCnc)
|
||||
if unit == "%" {
|
||||
var filtered []LoincInfo
|
||||
for _, c := range candidates {
|
||||
if c.Property != "NCnc" {
|
||||
filtered = append(filtered, c)
|
||||
}
|
||||
}
|
||||
if len(filtered) > 0 {
|
||||
candidates = filtered
|
||||
}
|
||||
}
|
||||
|
||||
if len(candidates) == 0 {
|
||||
log.Printf("loinc-lookup: no candidates for %q", name)
|
||||
return ""
|
||||
}
|
||||
|
||||
// 4. LLM pick
|
||||
code, lname, err := loincPick(name, specimen, lookupUnit, candidates)
|
||||
if err != nil {
|
||||
log.Printf("loinc-lookup: pick failed for %q: %v", name, err)
|
||||
return ""
|
||||
}
|
||||
|
||||
// 5. Cache
|
||||
RefExec(`INSERT OR REPLACE INTO loinc_cache (cache_key, input_name, input_specimen, input_unit, loinc_code, loinc_name, confidence)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 'llm')`, cacheKey, name, specimen, unit, code, lname)
|
||||
|
||||
return code
|
||||
}
|
||||
|
||||
// --- internal helpers ---
|
||||
|
||||
func loincTokenize(s string) []string {
|
||||
s = strings.ToLower(s)
|
||||
for _, c := range []string{",", ";", "(", ")", "[", "]", "/", "-", ".", ":"} {
|
||||
s = strings.ReplaceAll(s, c, " ")
|
||||
}
|
||||
var tokens []string
|
||||
seen := map[string]bool{}
|
||||
for _, t := range strings.Fields(s) {
|
||||
if len(t) < 2 || seen[t] {
|
||||
continue
|
||||
}
|
||||
tokens = append(tokens, t)
|
||||
seen[t] = true
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
func loincSearch(tokens []string) []LoincInfo {
|
||||
if len(tokens) == 0 {
|
||||
return nil
|
||||
}
|
||||
type entry struct {
|
||||
c LoincInfo
|
||||
hits int
|
||||
}
|
||||
entries := map[string]*entry{}
|
||||
|
||||
for _, t := range tokens {
|
||||
pattern := "%" + t + "%"
|
||||
query := "SELECT loinc_num, long_name, short_name, system, component, property FROM loinc_lab WHERE " +
|
||||
"LOWER(long_name) LIKE ? OR LOWER(short_name) LIKE ? OR LOWER(component) LIKE ?"
|
||||
var results []LoincInfo
|
||||
RefQuery(query, []any{pattern, pattern, pattern}, &results)
|
||||
for _, c := range results {
|
||||
if e, ok := entries[c.Code]; ok {
|
||||
e.hits++
|
||||
} else {
|
||||
entries[c.Code] = &entry{c: c, hits: 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
minHits := 2
|
||||
if len(tokens) <= 1 {
|
||||
minHits = 1
|
||||
}
|
||||
|
||||
type scored struct {
|
||||
c LoincInfo
|
||||
score int
|
||||
}
|
||||
var scoredResults []scored
|
||||
for _, e := range entries {
|
||||
if e.hits < minHits {
|
||||
continue
|
||||
}
|
||||
s := e.hits * 100
|
||||
compLen := len(e.c.Component)
|
||||
if compLen > 0 && compLen < 50 {
|
||||
s += 50 - compLen
|
||||
}
|
||||
if !strings.Contains(e.c.Component, "/") {
|
||||
s += 20
|
||||
}
|
||||
scoredResults = append(scoredResults, scored{e.c, s})
|
||||
}
|
||||
|
||||
for i := range scoredResults {
|
||||
for j := i + 1; j < len(scoredResults); j++ {
|
||||
if scoredResults[j].score > scoredResults[i].score {
|
||||
scoredResults[i], scoredResults[j] = scoredResults[j], scoredResults[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var top []LoincInfo
|
||||
for i, s := range scoredResults {
|
||||
if i >= 30 {
|
||||
break
|
||||
}
|
||||
top = append(top, s.c)
|
||||
}
|
||||
return top
|
||||
}
|
||||
|
||||
func loincExpand(name, specimen, unit string) ([]string, error) {
|
||||
prompt := fmt.Sprintf(`Given a lab test, return search terms to find it in the LOINC database.
|
||||
LOINC uses formal medical terminology (e.g. "Leukocytes" not "White Blood Cells", "Erythrocytes" not "Red Blood Cells", "Oxygen" not "O2" or "pO2").
|
||||
|
||||
Lab test:
|
||||
Name: %s
|
||||
Specimen: %s
|
||||
Unit: %s
|
||||
|
||||
Return a JSON object: {"terms": ["term1", "term2", ...]}
|
||||
Include: the LOINC component name, specimen system code (e.g. Bld, BldA, BldC, BldV, Ser/Plas, Urine), and any synonyms that might appear in LOINC long names.
|
||||
Keep it to 3-6 terms. JSON only.`, name, specimen, unit)
|
||||
|
||||
resp, err := CallGemini(prompt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Terms []string `json:"terms"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||
return nil, fmt.Errorf("parse expand response: %w", err)
|
||||
}
|
||||
|
||||
var terms []string
|
||||
seen := map[string]bool{}
|
||||
for _, t := range result.Terms {
|
||||
t = strings.ToLower(strings.TrimSpace(t))
|
||||
if t != "" && !seen[t] {
|
||||
terms = append(terms, t)
|
||||
seen[t] = true
|
||||
}
|
||||
}
|
||||
for _, t := range loincTokenize(name + " " + specimen) {
|
||||
if !seen[t] {
|
||||
terms = append(terms, t)
|
||||
seen[t] = true
|
||||
}
|
||||
}
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
func loincPick(name, specimen, unit string, candidates []LoincInfo) (string, string, error) {
|
||||
var lines []string
|
||||
for i, c := range candidates {
|
||||
display := c.LongName
|
||||
display = strings.ReplaceAll(display, "/100 ", "percentage of ")
|
||||
display = strings.ReplaceAll(display, "fraction", "percentage")
|
||||
lines = append(lines, fmt.Sprintf("%d. %s — %s [System: %s]", i+1, c.Code, display, c.System))
|
||||
}
|
||||
|
||||
prompt := fmt.Sprintf(`You are a clinical laboratory informatics system. Given a lab test, pick the BEST matching LOINC code from the candidate list.
|
||||
|
||||
Lab test:
|
||||
Name: %s
|
||||
Specimen: %s
|
||||
Unit: %s
|
||||
|
||||
Candidates:
|
||||
%s
|
||||
|
||||
Return ONLY a JSON object: {"pick": <number>, "loinc": "<code>", "name": "<long name>"}
|
||||
Pick the candidate that best matches the test name, specimen type, and unit. If none match well, pick the closest.
|
||||
JSON only, no explanation.`, name, specimen, unit, strings.Join(lines, "\n"))
|
||||
|
||||
resp, err := CallGemini(prompt)
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("LLM call failed: %w", err)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Pick int `json:"pick"`
|
||||
Loinc string `json:"loinc"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||
return "", "", fmt.Errorf("parse LLM response: %w", err)
|
||||
}
|
||||
|
||||
if result.Loinc == "" && result.Pick > 0 && result.Pick <= len(candidates) {
|
||||
result.Loinc = candidates[result.Pick-1].Code
|
||||
result.Name = candidates[result.Pick-1].LongName
|
||||
}
|
||||
for _, c := range candidates {
|
||||
if c.Code == result.Loinc {
|
||||
return result.Loinc, c.LongName, nil
|
||||
}
|
||||
}
|
||||
if result.Pick > 0 && result.Pick <= len(candidates) {
|
||||
c := candidates[result.Pick-1]
|
||||
return c.Code, c.LongName, nil
|
||||
}
|
||||
return "", "", fmt.Errorf("LLM returned %q (pick %d) — not in %d candidates", result.Loinc, result.Pick, len(candidates))
|
||||
}
|
||||
271
lib/normalize.go
271
lib/normalize.go
|
|
@ -4,189 +4,129 @@ import (
|
|||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Normalize normalizes entry names within a dossier for a given category.
|
||||
// Uses heuristic pre-grouping + LLM to map variant names to canonical forms.
|
||||
// Updates Summary (display) and Data JSON (normalized_name, abbreviation).
|
||||
// Original Type field is never modified.
|
||||
// Silently returns nil if no API key is configured.
|
||||
// Normalize resolves hospital test names to official LOINC codes and updates entries.
|
||||
// Flow: hospital name + specimen + unit → LOINC (via cache or Gemini lookup) → official name from loinc_lab.
|
||||
// No Fireworks LLM. Original Type field is never modified.
|
||||
func Normalize(dossierID string, category int, progress ...func(processed, total int)) error {
|
||||
reportProgress := func(p, t int) {
|
||||
if len(progress) > 0 && progress[0] != nil {
|
||||
progress[0](p, t)
|
||||
}
|
||||
}
|
||||
if FireworksKey == "" {
|
||||
SendSignal("normalize: FIREWORKS_API_KEY not configured, skipping normalization")
|
||||
return nil
|
||||
}
|
||||
|
||||
// 1. Load all entries, collect types only from entries that need normalization
|
||||
// 1. Load all entries, build parent map for specimen lookup
|
||||
entries, err := EntryQueryOld(dossierID, category, "")
|
||||
if err != nil {
|
||||
return fmt.Errorf("load entries: %w", err)
|
||||
}
|
||||
|
||||
seen := make(map[string]bool)
|
||||
var allNames []string
|
||||
parentMap := make(map[string]*Entry)
|
||||
for _, e := range entries {
|
||||
if e.ParentID == "" || e.Type == "lab_order" {
|
||||
parentMap[e.EntryID] = e
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Collect unique type|specimen|unit combos, resolve each to LOINC
|
||||
type testKey struct {
|
||||
name, specimen, unit string
|
||||
}
|
||||
type resolved struct {
|
||||
loinc string
|
||||
info *LoincInfo
|
||||
abbr string
|
||||
}
|
||||
cache := make(map[testKey]*resolved)
|
||||
var lookupCount, cacheHits, misses int
|
||||
|
||||
for _, e := range entries {
|
||||
if e.ParentID == "" || e.Type == "lab_order" || e.Type == "" {
|
||||
continue
|
||||
}
|
||||
// FIXED(review-2026-02-28): Skip only if FULLY normalized (has both SearchKey2 AND LOINC)
|
||||
// Previously skipped on SearchKey2 alone, causing LOINC to never be populated
|
||||
if e.SearchKey2 != "" {
|
||||
var data map[string]interface{}
|
||||
json.Unmarshal([]byte(e.Data), &data)
|
||||
if loinc, ok := data["loinc"].(string); ok && loinc != "" {
|
||||
continue // fully normalized
|
||||
}
|
||||
// Has SearchKey2 but no LOINC - needs normalization
|
||||
}
|
||||
if !seen[e.Type] {
|
||||
seen[e.Type] = true
|
||||
allNames = append(allNames, e.Type)
|
||||
}
|
||||
}
|
||||
var data map[string]interface{}
|
||||
json.Unmarshal([]byte(e.Data), &data)
|
||||
unit, _ := data["unit"].(string)
|
||||
|
||||
if len(allNames) == 0 {
|
||||
log.Printf("normalize: all entries already normalized")
|
||||
return nil
|
||||
}
|
||||
|
||||
// 2. Pre-group by heuristic key (strip POCT, specimen suffixes, normalize case)
|
||||
groups := make(map[string][]string) // cleanKey → [original names]
|
||||
for _, name := range allNames {
|
||||
key := normalizeKey(name)
|
||||
groups[key] = append(groups[key], name)
|
||||
}
|
||||
|
||||
// Send just the group keys to LLM
|
||||
keys := make([]string, 0, len(groups))
|
||||
for k := range groups {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
|
||||
log.Printf("normalize: %d unique types → %d groups after pre-grouping", len(allNames), len(keys))
|
||||
|
||||
// 3. Call LLM with group keys (batched to stay within token limits)
|
||||
mapping := make(map[string]normMapping)
|
||||
batchSize := 50
|
||||
for i := 0; i < len(keys); i += batchSize {
|
||||
end := i + batchSize
|
||||
if end > len(keys) {
|
||||
end = len(keys)
|
||||
}
|
||||
batch := keys[i:end]
|
||||
reportProgress(end, len(keys))
|
||||
log.Printf("normalize: LLM batch %d-%d of %d", i+1, end, len(keys))
|
||||
|
||||
batchMap, err := callNormalizeLLM(batch)
|
||||
if err != nil {
|
||||
SendSignal(fmt.Sprintf("normalize: LLM batch %d-%d failed: %v", i+1, end, err))
|
||||
return fmt.Errorf("LLM batch %d-%d: %w", i+1, end, err)
|
||||
}
|
||||
for k, v := range batchMap {
|
||||
mapping[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Expand: each original name in a group gets the group's canonical mapping
|
||||
fullMapping := make(map[string]normMapping)
|
||||
for key, origNames := range groups {
|
||||
if m, ok := mapping[key]; ok {
|
||||
for _, orig := range origNames {
|
||||
fullMapping[orig] = m
|
||||
specimen := ""
|
||||
if parent, ok := parentMap[e.ParentID]; ok {
|
||||
var pdata map[string]interface{}
|
||||
if json.Unmarshal([]byte(parent.Data), &pdata) == nil {
|
||||
specimen, _ = pdata["specimen"].(string)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log.Printf("normalize: LLM mapped %d groups → %d original names covered", len(mapping), len(fullMapping))
|
||||
|
||||
// 5. Save LabTest entries for any new LOINC codes
|
||||
seenLoinc := make(map[string]bool)
|
||||
var labTests []LabTest
|
||||
for _, m := range fullMapping {
|
||||
if m.Loinc == "" || seenLoinc[m.Loinc] {
|
||||
tk := testKey{e.Type, specimen, unit}
|
||||
if _, ok := cache[tk]; ok {
|
||||
continue
|
||||
}
|
||||
seenLoinc[m.Loinc] = true
|
||||
dir := m.Direction
|
||||
if dir == "" {
|
||||
dir = DirRange
|
||||
lookupCount++
|
||||
|
||||
loinc := LoincLookup(e.Type, specimen, unit)
|
||||
if loinc == "" {
|
||||
cache[tk] = &resolved{}
|
||||
misses++
|
||||
continue
|
||||
}
|
||||
factor := m.SIFactor
|
||||
if factor == 0 {
|
||||
factor = 1.0
|
||||
|
||||
info := LoincGet(loinc)
|
||||
abbr := LoincAbbr(info)
|
||||
cache[tk] = &resolved{loinc: loinc, info: info, abbr: abbr}
|
||||
if info != nil {
|
||||
cacheHits++
|
||||
}
|
||||
labTests = append(labTests, LabTest{
|
||||
LoincID: m.Loinc,
|
||||
Name: m.Name,
|
||||
SIUnit: m.SIUnit,
|
||||
Direction: dir,
|
||||
SIFactor: ToLabScale(factor),
|
||||
})
|
||||
}
|
||||
for _, t := range labTests {
|
||||
RefExec(`INSERT OR IGNORE INTO lab_test (loinc_id, name, si_unit, direction, si_factor) VALUES (?, ?, ?, ?, ?)`,
|
||||
t.LoincID, t.Name, t.SIUnit, t.Direction, t.SIFactor)
|
||||
}
|
||||
if len(labTests) > 0 {
|
||||
log.Printf("normalize: saved %d lab tests", len(labTests))
|
||||
}
|
||||
|
||||
// 5. Apply mapping to loaded entries, save only changed ones
|
||||
reportProgress(lookupCount, lookupCount)
|
||||
log.Printf("normalize: %d unique combos, %d resolved, %d unresolved", lookupCount, cacheHits, misses)
|
||||
|
||||
// 3. Apply to entries
|
||||
var toSave []Entry
|
||||
for _, e := range entries {
|
||||
if e.ParentID == "" {
|
||||
if e.ParentID == "" || e.Type == "lab_order" || e.Type == "" {
|
||||
continue
|
||||
}
|
||||
norm, ok := fullMapping[e.Type]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
var data map[string]interface{}
|
||||
if json.Unmarshal([]byte(e.Data), &data) != nil {
|
||||
data = make(map[string]interface{})
|
||||
}
|
||||
unit, _ := data["unit"].(string)
|
||||
|
||||
// Skip if already fully normalized
|
||||
existingName, _ := data["normalized_name"].(string)
|
||||
existingLoinc, _ := data["loinc"].(string)
|
||||
needsSearchKey := (norm.Loinc != "" && e.SearchKey == "")
|
||||
needsSearchKey2 := e.SearchKey2 == ""
|
||||
if existingName == norm.Name && (norm.Loinc == "" || existingLoinc == norm.Loinc) && !needsSearchKey && !needsSearchKey2 {
|
||||
specimen := ""
|
||||
if parent, ok := parentMap[e.ParentID]; ok {
|
||||
var pdata map[string]interface{}
|
||||
if json.Unmarshal([]byte(parent.Data), &pdata) == nil {
|
||||
specimen, _ = pdata["specimen"].(string)
|
||||
}
|
||||
}
|
||||
|
||||
r := cache[testKey{e.Type, specimen, unit}]
|
||||
if r == nil || r.loinc == "" || r.info == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
data["normalized_name"] = norm.Name
|
||||
data["abbreviation"] = norm.Abbr
|
||||
if norm.Loinc != "" {
|
||||
data["loinc"] = norm.Loinc
|
||||
}
|
||||
if norm.SIUnit != "" {
|
||||
data["si_unit"] = norm.SIUnit
|
||||
}
|
||||
if norm.SIFactor != 0 && norm.SIFactor != 1.0 {
|
||||
data["si_factor"] = norm.SIFactor
|
||||
// Check if already up to date
|
||||
existingLoinc, _ := data["loinc"].(string)
|
||||
existingName, _ := data["normalized_name"].(string)
|
||||
if existingLoinc == r.loinc && existingName == r.info.LongName && e.SearchKey == r.loinc && e.SearchKey2 != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Update Data JSON
|
||||
data["loinc"] = r.loinc
|
||||
data["normalized_name"] = r.info.LongName
|
||||
data["abbreviation"] = r.abbr
|
||||
b, _ := json.Marshal(data)
|
||||
e.Data = string(b)
|
||||
|
||||
// Update SearchKey with LOINC code, SearchKey2 with canonical test name
|
||||
if norm.Loinc != "" {
|
||||
e.SearchKey = norm.Loinc
|
||||
}
|
||||
e.SearchKey2 = strings.ToLower(norm.Name)
|
||||
// Update search keys
|
||||
e.SearchKey = r.loinc
|
||||
e.SearchKey2 = strings.ToLower(r.info.LongName)
|
||||
|
||||
// Rebuild Summary: "Abbr: value unit"
|
||||
unit, _ := data["unit"].(string)
|
||||
summary := norm.Abbr + ": " + e.Value
|
||||
summary := r.abbr + ": " + e.Value
|
||||
if unit != "" {
|
||||
summary += " " + unit
|
||||
}
|
||||
|
|
@ -207,62 +147,3 @@ func Normalize(dossierID string, category int, progress ...func(processed, total
|
|||
}
|
||||
return EntryWrite("", ptrs...)
|
||||
}
|
||||
|
||||
// normalizeKey reduces a test name to a heuristic grouping key.
|
||||
// Groups obvious duplicates: POCT variants, specimen suffixes, case.
|
||||
func normalizeKey(name string) string {
|
||||
s := strings.ToLower(strings.TrimSpace(name))
|
||||
s = strings.TrimPrefix(s, "poct ")
|
||||
// Strip specimen-type suffixes only (not qualifiers like ", total", ", direct")
|
||||
for _, suf := range []string{", whole blood", ", wblood", ", wb", ", wbl", ", blood", ", s/p", " ach"} {
|
||||
s = strings.TrimSuffix(s, suf)
|
||||
}
|
||||
return strings.TrimSpace(s)
|
||||
}
|
||||
|
||||
type normMapping struct {
|
||||
Name string `json:"name"`
|
||||
Abbr string `json:"abbr"`
|
||||
Loinc string `json:"loinc"`
|
||||
SIUnit string `json:"si_unit"`
|
||||
SIFactor float64 `json:"si_factor"`
|
||||
Direction string `json:"direction"`
|
||||
}
|
||||
|
||||
// callNormalizeLLM asks the LLM to map each raw test name to a canonical
// normMapping (name, abbreviation, LOINC code, SI conversion, direction).
// The returned map is keyed by the EXACT input name. It returns an error
// if the model call fails or the model output is not parseable JSON.
func callNormalizeLLM(names []string) (map[string]normMapping, error) {
	nameList := strings.Join(names, "\n")

	// %% in the format string escapes literal percent signs (e.g. "Neutrophils%").
	prompt := fmt.Sprintf(`Normalize these medical test names. Return ONLY a JSON object, no explanation.

Each key is the EXACT input name. Value format: {"name":"Canonical Name","abbr":"Abbreviation","loinc":"LOINC","si_unit":"unit","si_factor":1.0,"direction":"range"}

Key LOINC codes: WBC=6690-2, RBC=789-8, Hemoglobin=718-7, Hematocrit=4544-3, MCV=787-2, MCH=785-6, MCHC=786-4, RDW=788-0, Platelets=777-3, Neutrophils%%=770-8, Lymphocytes%%=736-9, Monocytes%%=5905-5, Eosinophils%%=713-8, Basophils%%=706-2, Glucose=2345-7, BUN=3094-0, Creatinine=2160-0, Sodium=2951-2, Potassium=2823-3, Chloride=2075-0, CO2=2028-9, Calcium=17861-6, Total Protein=2885-2, Albumin=1751-7, Total Bilirubin=1975-2, ALP=6768-6, AST=1920-8, ALT=1742-6.

Abbreviations: WBC, RBC, Hgb, Hct, MCV, MCH, MCHC, RDW, PLT, Neut, Lymph, Mono, Eos, Baso, Glu, BUN, Cr, Na, K, Cl, CO2, Ca, TP, Alb, Bili, ALP, AST, ALT, Mg, Phos, Fe, etc.
si_factor: conventional→SI multiplier (e.g. Hgb g/dL→g/L=10.0). Use 1.0 if same or unknown.
direction: "range" (default), "lower_better" (CRP, LDL, glucose), "higher_better" (HDL).

Test names:
%s`, nameList)

	messages := []map[string]interface{}{
		{"role": "user", "content": prompt},
	}
	resp, err := CallFireworks("accounts/fireworks/models/qwen3-vl-30b-a3b-instruct", messages, 4096)
	if err != nil {
		return nil, err
	}

	// Strip an optional markdown code fence the model may wrap around the JSON.
	resp = strings.TrimSpace(resp)
	resp = strings.TrimPrefix(resp, "```json")
	resp = strings.TrimPrefix(resp, "```")
	resp = strings.TrimSuffix(resp, "```")
	resp = strings.TrimSpace(resp)

	var mapping map[string]normMapping
	if err := json.Unmarshal([]byte(resp), &mapping); err != nil {
		// Include a bounded prefix of the raw response to aid debugging.
		return nil, fmt.Errorf("parse response: %w (first 500 chars: %.500s)", err, resp)
	}

	return mapping, nil
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,374 @@
|
|||
package lib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"image"
|
||||
"image/color"
|
||||
"image/draw"
|
||||
"image/png"
|
||||
"math"
|
||||
"strconv"
|
||||
|
||||
"github.com/chai2010/webp"
|
||||
"golang.org/x/image/font"
|
||||
"golang.org/x/image/font/opentype"
|
||||
"golang.org/x/image/math/fixed"
|
||||
|
||||
xdraw "golang.org/x/image/draw"
|
||||
)
|
||||
|
||||
//go:embed Sora-Regular.ttf
|
||||
var soraRegularData []byte
|
||||
|
||||
//go:embed Sora-SemiBold.ttf
|
||||
var soraSemiBoldData []byte
|
||||
|
||||
var (
|
||||
soraFace14 font.Face
|
||||
soraFace12 font.Face
|
||||
soraBoldFace14 font.Face
|
||||
soraBoldFace20 font.Face
|
||||
)
|
||||
|
||||
// init parses the embedded Sora font data and builds the package-level
// font faces used by the contact-sheet drawing helpers. On any parse
// failure it returns early, leaving the faces nil; the csDraw*/csMeasure*
// helpers nil-check the faces and degrade gracefully (text is skipped,
// widths fall back to an estimate).
func init() {
	regular, err := opentype.Parse(soraRegularData)
	if err != nil {
		return
	}
	semibold, err := opentype.Parse(soraSemiBoldData)
	if err != nil {
		return
	}
	// NewFace errors are deliberately ignored: a failed face stays nil and
	// is handled by the nil checks in the drawing helpers.
	soraFace14, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 14, DPI: 72})
	soraFace12, _ = opentype.NewFace(regular, &opentype.FaceOptions{Size: 12, DPI: 72})
	soraBoldFace14, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 14, DPI: 72})
	soraBoldFace20, _ = opentype.NewFace(semibold, &opentype.FaceOptions{Size: 20, DPI: 72})
}
|
||||
|
||||
// RenderImage returns a DICOM slice as webp bytes, resized to maxDim.
|
||||
func RenderImage(accessorID, id string, opts *ImageOpts, maxDim int) ([]byte, error) {
|
||||
img, err := ImageGet(accessorID, id, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if maxDim <= 0 {
|
||||
maxDim = 2000
|
||||
}
|
||||
bounds := img.Bounds()
|
||||
w0, h0 := bounds.Dx(), bounds.Dy()
|
||||
if w0 > maxDim || h0 > maxDim {
|
||||
scale := float64(maxDim) / float64(max(w0, h0))
|
||||
newW := int(float64(w0) * scale)
|
||||
newH := int(float64(h0) * scale)
|
||||
resized := image.NewRGBA(image.Rect(0, 0, newW, newH))
|
||||
xdraw.BiLinear.Scale(resized, resized.Bounds(), img, bounds, xdraw.Over, nil)
|
||||
img = resized
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := webp.Encode(&buf, img, &webp.Options{Lossless: true}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// RenderContactSheet returns a contact sheet webp for a series.
|
||||
func RenderContactSheet(accessorID, seriesID string, wc, ww float64) ([]byte, error) {
|
||||
series, err := entryGetByID(accessorID, seriesID)
|
||||
if err != nil || series == nil {
|
||||
return nil, fmt.Errorf("series not found")
|
||||
}
|
||||
dossierID := series.DossierID
|
||||
seriesDesc := series.Tags
|
||||
var seriesData struct {
|
||||
Modality string `json:"modality"`
|
||||
}
|
||||
json.Unmarshal([]byte(series.Data), &seriesData)
|
||||
|
||||
study, err := entryGetByID("", series.ParentID)
|
||||
if err != nil || study == nil {
|
||||
return nil, fmt.Errorf("study not found")
|
||||
}
|
||||
var studyData struct {
|
||||
StudyDate string `json:"study_date"`
|
||||
StudyDesc string `json:"study_description"`
|
||||
}
|
||||
json.Unmarshal([]byte(study.Data), &studyData)
|
||||
|
||||
dossier, _ := DossierGet("", dossierID)
|
||||
patientName := ""
|
||||
if dossier != nil {
|
||||
patientName = dossier.Name
|
||||
}
|
||||
|
||||
entries, err := EntryChildrenByType(dossierID, seriesID, "slice")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(entries) == 0 {
|
||||
return nil, fmt.Errorf("no slices found")
|
||||
}
|
||||
|
||||
var firstSliceData struct {
|
||||
SliceThickness float64 `json:"slice_thickness"`
|
||||
}
|
||||
json.Unmarshal([]byte(entries[0].Data), &firstSliceData)
|
||||
step := csStepSize(5.0, firstSliceData.SliceThickness)
|
||||
|
||||
hasOverride := wc != 0 || ww != 0
|
||||
|
||||
type thumbInfo struct {
|
||||
img image.Image
|
||||
sliceNum int
|
||||
pos float64
|
||||
entryID string
|
||||
}
|
||||
var thumbs []thumbInfo
|
||||
var usedWC, usedWW float64
|
||||
for i, e := range entries {
|
||||
if (i+1)%step != 1 && step != 1 {
|
||||
continue
|
||||
}
|
||||
thumb, pos, twc, tww := csLoadThumb(accessorID, e, wc, ww, hasOverride)
|
||||
if thumb != nil {
|
||||
thumbs = append(thumbs, thumbInfo{img: thumb, sliceNum: i + 1, pos: pos, entryID: e.EntryID})
|
||||
if usedWC == 0 && usedWW == 0 {
|
||||
usedWC, usedWW = twc, tww
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(thumbs) == 0 {
|
||||
return nil, fmt.Errorf("could not load any images")
|
||||
}
|
||||
|
||||
const thumbSz = 128
|
||||
const hdrHeight = 58
|
||||
const gridCols = 12
|
||||
const pad = 2
|
||||
|
||||
activeCols := gridCols
|
||||
if len(thumbs) < activeCols {
|
||||
activeCols = len(thumbs)
|
||||
}
|
||||
gridRows := (len(thumbs) + gridCols - 1) / gridCols
|
||||
|
||||
outWidth := gridCols*thumbSz + (gridCols-1)*pad
|
||||
outHeight := hdrHeight + gridRows*thumbSz + (gridRows-1)*pad
|
||||
|
||||
out := image.NewRGBA(image.Rect(0, 0, outWidth, outHeight))
|
||||
draw.Draw(out, out.Bounds(), &image.Uniform{color.RGBA{80, 80, 80, 255}}, image.Point{}, draw.Src)
|
||||
|
||||
csDrawHeader(out, hdrHeight, patientName, studyData.StudyDesc, seriesDesc, seriesData.Modality, studyData.StudyDate, len(entries), firstSliceData.SliceThickness, step, usedWC, usedWW)
|
||||
|
||||
for i, t := range thumbs {
|
||||
col := i % gridCols
|
||||
row := i / gridCols
|
||||
x := col * (thumbSz + pad)
|
||||
y := hdrHeight + row*(thumbSz+pad)
|
||||
draw.Draw(out, image.Rect(x, y, x+thumbSz, y+thumbSz), t.img, image.Point{}, draw.Src)
|
||||
csDrawNumber(out, x+2, y+2, t.sliceNum)
|
||||
csDrawPosition(out, x+thumbSz-2, y+2, t.pos)
|
||||
csDrawHexID(out, x+2, y+thumbSz-4, t.entryID)
|
||||
}
|
||||
|
||||
for i := len(thumbs); i < gridRows*gridCols; i++ {
|
||||
col := i % gridCols
|
||||
row := i / gridCols
|
||||
x := col * (thumbSz + pad)
|
||||
y := hdrHeight + row*(thumbSz+pad)
|
||||
draw.Draw(out, image.Rect(x, y, x+thumbSz, y+thumbSz), &image.Uniform{color.Black}, image.Point{}, draw.Src)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := webp.Encode(&buf, out, &webp.Options{Quality: 10}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// --- contact sheet helpers ---
|
||||
|
||||
// csStepSize returns how many slices to advance between contact-sheet
// thumbnails so that sampled slices sit roughly spacingMM apart, given the
// per-slice thickness. An unknown thickness (<= 0) samples every slice,
// and the result is never less than 1.
func csStepSize(spacingMM, thicknessMM float64) int {
	if thicknessMM <= 0 {
		return 1
	}
	if step := int(math.Round(spacingMM / thicknessMM)); step > 1 {
		return step
	}
	return 1
}
|
||||
|
||||
// csLoadThumb loads one slice entry, applies window center/width to
// 16-bit grayscale data, and scales the result to a 128x128 thumbnail.
// It returns (thumb, slice position in mm, applied center, applied width),
// or (nil, 0, 0, 0) on any failure — callers treat nil as "skip".
func csLoadThumb(accessorID string, e *Entry, wcOverride, wwOverride float64, hasOverride bool) (image.Image, float64, float64, float64) {
	var data struct {
		WindowCenter  float64 `json:"window_center"`
		WindowWidth   float64 `json:"window_width"`
		PixelMin      int     `json:"pixel_min"`
		PixelMax      int     `json:"pixel_max"`
		SliceLocation float64 `json:"slice_location"`
	}
	// Best-effort decode: a bad payload just leaves zero values, which the
	// fallback windowing below handles.
	json.Unmarshal([]byte(e.Data), &data)

	// Choose the window: caller override wins; otherwise use the slice's
	// stored values, falling back to the full pixel range when both are 0.
	var center, width float64
	if hasOverride {
		center, width = wcOverride, wwOverride
		if width == 0 {
			width = 1 // avoid division by zero in the mapping below
		}
	} else {
		center, width = data.WindowCenter, data.WindowWidth
		if center == 0 && width == 0 {
			center = float64(data.PixelMin+data.PixelMax) / 2
			width = float64(data.PixelMax - data.PixelMin)
			if width == 0 {
				width = 1
			}
		}
	}

	dec, err := ObjectRead(&AccessContext{AccessorID: accessorID}, e.DossierID, e.EntryID)
	if err != nil {
		return nil, 0, 0, 0
	}
	img, err := png.Decode(bytes.NewReader(dec))
	if err != nil {
		return nil, 0, 0, 0
	}

	const thumbSz = 128
	bounds := img.Bounds()
	var processed image.Image

	switch src := img.(type) {
	case *image.Gray16:
		// Linear window/level mapping: values at or below (center - width/2)
		// become black, at or above (center + width/2) become white.
		low, high := center-width/2, center+width/2
		gray := image.NewGray(bounds)
		for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
			for x := bounds.Min.X; x < bounds.Max.X; x++ {
				v := float64(src.Gray16At(x, y).Y)
				var out uint8
				if v <= low {
					out = 0
				} else if v >= high {
					out = 255
				} else {
					out = uint8((v - low) * 255 / width)
				}
				gray.SetGray(x, y, color.Gray{Y: out})
			}
		}
		processed = gray
	case *image.RGBA, *image.NRGBA:
		// Color slices pass through unwindowed.
		processed = src
	default:
		// Unsupported pixel format.
		return nil, 0, 0, 0
	}

	// NOTE(review): the thumbnail is a fixed square, so non-square slices
	// are stretched rather than letterboxed.
	thumb := image.NewRGBA(image.Rect(0, 0, thumbSz, thumbSz))
	xdraw.BiLinear.Scale(thumb, thumb.Bounds(), processed, processed.Bounds(), xdraw.Over, nil)
	return thumb, data.SliceLocation, center, width
}
|
||||
|
||||
func csDrawHeader(img *image.RGBA, hdrHeight int, patient, study, series, modality, date string, totalSlices int, sliceThickness float64, step int, wc, ww float64) {
|
||||
if len(date) == 8 {
|
||||
date = date[0:4] + "-" + date[4:6] + "-" + date[6:8]
|
||||
}
|
||||
draw.Draw(img, image.Rect(0, 0, img.Bounds().Dx(), hdrHeight),
|
||||
&image.Uniform{color.RGBA{32, 32, 32, 255}}, image.Point{}, draw.Src)
|
||||
|
||||
csDrawStringBold20(img, 10, 22, "!! NAVIGATION ONLY - USE fetch_image FOR DIAGNOSIS !!", color.RGBA{255, 50, 50, 255})
|
||||
|
||||
stInfo := strconv.FormatFloat(sliceThickness, 'f', 1, 64) + "mm"
|
||||
if step > 1 {
|
||||
stInfo += " (every " + strconv.Itoa(step) + ")"
|
||||
}
|
||||
wlInfo := "WC:" + strconv.FormatFloat(wc, 'f', 0, 64) + " WW:" + strconv.FormatFloat(ww, 'f', 0, 64)
|
||||
|
||||
csDrawStringBold14(img, 10, 38, patient, color.RGBA{255, 255, 255, 255})
|
||||
patientWidth := csMeasureStringBold(patient)
|
||||
rest := " | " + modality + " " + series + " | " + date + " | " + strconv.Itoa(totalSlices) + " slices | ST " + stInfo + " | " + wlInfo
|
||||
csDrawString14(img, 10+patientWidth, 38, rest, color.RGBA{200, 200, 200, 255})
|
||||
|
||||
instructions := "Top-left: slice# Top-right: position(mm) Bottom: entry ID for fetch_image"
|
||||
csDrawString14(img, 10, 54, instructions, color.RGBA{255, 255, 255, 255})
|
||||
}
|
||||
|
||||
func csDrawNumber(img *image.RGBA, x, y, num int) {
|
||||
s := strconv.Itoa(num)
|
||||
textWidth := csMeasureString(s, 14) + 6
|
||||
for dy := 0; dy < 18; dy++ {
|
||||
for dx := 0; dx < textWidth; dx++ {
|
||||
img.SetRGBA(x+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||
}
|
||||
}
|
||||
csDrawStringSize(img, x+3, y+14, s, color.RGBA{255, 255, 0, 255}, 14)
|
||||
}
|
||||
|
||||
// csDrawHexID draws a slice's entry ID in small yellow 12pt text starting
// just right of x at baseline y (used at the thumbnail's bottom edge).
func csDrawHexID(img *image.RGBA, x, y int, id string) {
	csDrawStringSize(img, x+4, y, id, color.RGBA{255, 255, 0, 255}, 12)
}
|
||||
|
||||
func csDrawPosition(img *image.RGBA, x, y int, pos float64) {
|
||||
s := strconv.FormatFloat(pos, 'f', 1, 64) + "mm"
|
||||
textWidth := csMeasureString(s, 12) + 6
|
||||
for dy := 0; dy < 16; dy++ {
|
||||
for dx := 0; dx < textWidth; dx++ {
|
||||
img.SetRGBA(x-textWidth+dx, y+dy, color.RGBA{0, 0, 0, 200})
|
||||
}
|
||||
}
|
||||
csDrawStringSize(img, x-textWidth+3, y+12, s, color.RGBA{255, 255, 0, 255}, 12)
|
||||
}
|
||||
|
||||
// csDrawString14 draws s at baseline (x, y) in the regular 14pt face.
func csDrawString14(img *image.RGBA, x, y int, s string, col color.RGBA) {
	csDrawStringSize(img, x, y, s, col, 14)
}
|
||||
|
||||
func csDrawStringSize(img *image.RGBA, x, y int, s string, col color.RGBA, size int) {
|
||||
face := soraFace14
|
||||
if size <= 12 {
|
||||
face = soraFace12
|
||||
}
|
||||
if face == nil {
|
||||
return
|
||||
}
|
||||
d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: face, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
|
||||
d.DrawString(s)
|
||||
}
|
||||
|
||||
// csDrawStringBold14 draws s at baseline (x, y) in the semibold 14pt
// face; no-op if that face failed to load in init.
func csDrawStringBold14(img *image.RGBA, x, y int, s string, col color.RGBA) {
	if soraBoldFace14 == nil {
		return
	}
	d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: soraBoldFace14, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
	d.DrawString(s)
}
|
||||
|
||||
// csDrawStringBold20 draws s at baseline (x, y) in the semibold 20pt
// face (used for the header warning banner); no-op if the face is nil.
func csDrawStringBold20(img *image.RGBA, x, y int, s string, col color.RGBA) {
	if soraBoldFace20 == nil {
		return
	}
	d := &font.Drawer{Dst: img, Src: &image.Uniform{col}, Face: soraBoldFace20, Dot: fixed.Point26_6{X: fixed.I(x), Y: fixed.I(y)}}
	d.DrawString(s)
}
|
||||
|
||||
func csMeasureString(s string, size int) int {
|
||||
face := soraFace14
|
||||
if size <= 12 {
|
||||
face = soraFace12
|
||||
}
|
||||
if face == nil {
|
||||
return len(s) * 8
|
||||
}
|
||||
return (&font.Drawer{Face: face}).MeasureString(s).Ceil()
|
||||
}
|
||||
|
||||
// csMeasureStringBold returns the advance width in pixels of s in the
// semibold 14pt face, falling back to 8px per byte when the face is nil.
func csMeasureStringBold(s string) int {
	if soraBoldFace14 == nil {
		return len(s) * 8
	}
	return (&font.Drawer{Face: soraBoldFace14}).MeasureString(s).Ceil()
}
|
||||
|
|
@ -123,11 +123,6 @@ func EntryTypes(dossierID string, category int) ([]string, error) {
|
|||
|
||||
// --- Dossier stubs ---
|
||||
|
||||
// DossierGetBySessionToken is a stub: it logs that it was called and
// always returns nil (no dossier is ever resolved from the token).
func DossierGetBySessionToken(token string) *Dossier {
	log.Printf("[STUB] DossierGetBySessionToken")
	return nil
}
|
||||
|
||||
func DossierQuery(accessorID string) ([]*DossierQueryRow, error) {
|
||||
// Get all accessible dossier profiles via RBAC
|
||||
dossierEntries, err := EntryRead(accessorID, "", &Filter{Category: 0})
|
||||
|
|
|
|||
|
|
@ -0,0 +1,720 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>inou — Genetics</title>
|
||||
<style>
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Regular.ttf'); font-weight: 400; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Light.ttf'); font-weight: 300; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-SemiBold.ttf'); font-weight: 600; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Bold.ttf'); font-weight: 700; }
|
||||
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
|
||||
:root {
|
||||
--amber: #B45309;
|
||||
--amber-light: #FEF3C7;
|
||||
--amber-mid: #F59E0B;
|
||||
--bg: #F8F7F6;
|
||||
--surface: #FFFFFF;
|
||||
--border: #E5E3E0;
|
||||
--text: #1A1A1A;
|
||||
--text-muted: #6B6968;
|
||||
--text-faint: #A8A5A2;
|
||||
--green: #15803D;
|
||||
--green-light: #DCFCE7;
|
||||
--nav-bg: #1C1917;
|
||||
--nav-text: #D6D3D1;
|
||||
--nav-active: #FFFFFF;
|
||||
--sidebar-w: 220px;
|
||||
--topbar-h: 52px;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Sora', system-ui, sans-serif;
|
||||
background: var(--bg);
|
||||
color: var(--text);
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* TOP NAV */
|
||||
.topbar {
|
||||
height: var(--topbar-h);
|
||||
background: var(--nav-bg);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 0 20px;
|
||||
gap: 16px;
|
||||
flex-shrink: 0;
|
||||
border-bottom: 1px solid #2C2A28;
|
||||
}
|
||||
.topbar-logo {
|
||||
font-weight: 700;
|
||||
font-size: 16px;
|
||||
color: #FFFFFF;
|
||||
letter-spacing: -0.3px;
|
||||
}
|
||||
.topbar-logo span { color: var(--amber); }
|
||||
.topbar-patient {
|
||||
margin-left: auto;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
}
|
||||
.topbar-patient-name {
|
||||
font-size: 13px;
|
||||
color: var(--nav-text);
|
||||
font-weight: 600;
|
||||
}
|
||||
.topbar-patient-dob {
|
||||
font-size: 12px;
|
||||
color: #78716C;
|
||||
}
|
||||
.avatar {
|
||||
width: 30px; height: 30px;
|
||||
border-radius: 50%;
|
||||
background: var(--amber);
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
font-size: 12px; font-weight: 700; color: white;
|
||||
}
|
||||
|
||||
/* LAYOUT */
|
||||
.layout {
|
||||
display: flex;
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* SIDEBAR */
|
||||
.sidebar {
|
||||
width: var(--sidebar-w);
|
||||
background: var(--nav-bg);
|
||||
flex-shrink: 0;
|
||||
overflow-y: auto;
|
||||
padding: 12px 0;
|
||||
border-right: 1px solid #2C2A28;
|
||||
}
|
||||
.nav-section-label {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
color: #57534E;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.8px;
|
||||
padding: 12px 16px 4px;
|
||||
}
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 7px 16px;
|
||||
font-size: 13px;
|
||||
color: var(--nav-text);
|
||||
cursor: pointer;
|
||||
border-radius: 0;
|
||||
text-decoration: none;
|
||||
}
|
||||
.nav-item:hover { background: #292524; }
|
||||
.nav-item.active {
|
||||
color: var(--nav-active);
|
||||
background: #292524;
|
||||
font-weight: 600;
|
||||
}
|
||||
.nav-item.active::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
width: 3px;
|
||||
height: 28px;
|
||||
background: var(--amber);
|
||||
border-radius: 0 2px 2px 0;
|
||||
}
|
||||
.nav-item { position: relative; }
|
||||
.nav-dot {
|
||||
width: 6px; height: 6px;
|
||||
border-radius: 50%;
|
||||
background: #57534E;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.nav-dot.active { background: var(--amber); }
|
||||
.nav-sub {
|
||||
padding-left: 12px;
|
||||
}
|
||||
|
||||
/* MAIN */
|
||||
.main {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 24px 32px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
/* BREADCRUMB + SEARCH ROW */
|
||||
.top-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 16px;
|
||||
}
|
||||
.breadcrumb {
|
||||
font-size: 13px;
|
||||
color: var(--text-muted);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
.breadcrumb span { color: var(--text); font-weight: 600; }
|
||||
.breadcrumb-sep { color: var(--text-faint); }
|
||||
|
||||
.search-box {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 7px 12px;
|
||||
width: 260px;
|
||||
}
|
||||
.search-box input {
|
||||
border: none;
|
||||
outline: none;
|
||||
font-family: 'Sora', sans-serif;
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
background: transparent;
|
||||
width: 100%;
|
||||
}
|
||||
.search-box input::placeholder { color: var(--text-faint); }
|
||||
.search-icon { color: var(--text-faint); font-size: 15px; }
|
||||
|
||||
/* STATS ROW */
|
||||
.stats-row {
|
||||
display: flex;
|
||||
gap: 24px;
|
||||
align-items: center;
|
||||
}
|
||||
.stat {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 1px;
|
||||
}
|
||||
.stat-value {
|
||||
font-size: 22px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
line-height: 1;
|
||||
}
|
||||
.stat-label {
|
||||
font-size: 11px;
|
||||
color: var(--text-muted);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
.stat-divider {
|
||||
width: 1px;
|
||||
height: 32px;
|
||||
background: var(--border);
|
||||
}
|
||||
.hidden-note {
|
||||
font-size: 12px;
|
||||
color: var(--text-faint);
|
||||
padding: 4px 10px;
|
||||
background: var(--bg);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 20px;
|
||||
margin-left: auto;
|
||||
cursor: pointer;
|
||||
}
|
||||
.hidden-note:hover { border-color: var(--amber); color: var(--amber); }
|
||||
|
||||
/* TIER GRID */
|
||||
.tier-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(4, 1fr);
|
||||
gap: 10px;
|
||||
}
|
||||
.tier-tile {
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 10px;
|
||||
padding: 14px 16px;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.15s, box-shadow 0.15s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
.tier-tile:hover {
|
||||
border-color: #D1C5BA;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.06);
|
||||
}
|
||||
.tier-tile.active {
|
||||
border-color: var(--amber);
|
||||
box-shadow: 0 0 0 1px var(--amber);
|
||||
}
|
||||
.tier-name {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
}
|
||||
.tier-count {
|
||||
font-size: 20px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
line-height: 1.1;
|
||||
}
|
||||
.tier-hidden {
|
||||
font-size: 11px;
|
||||
color: var(--text-faint);
|
||||
}
|
||||
.tier-tile.large .tier-count { color: var(--amber); }
|
||||
|
||||
/* EXPANDED TIER */
|
||||
.expanded-section {
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.expanded-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 14px 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.expanded-title {
|
||||
font-size: 14px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
}
|
||||
.expanded-subtitle {
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
font-weight: 400;
|
||||
}
|
||||
.toggle-hidden {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
cursor: pointer;
|
||||
}
|
||||
.toggle-pill {
|
||||
width: 28px; height: 16px;
|
||||
background: var(--border);
|
||||
border-radius: 8px;
|
||||
position: relative;
|
||||
}
|
||||
.toggle-pill::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 2px; left: 2px;
|
||||
width: 12px; height: 12px;
|
||||
border-radius: 50%;
|
||||
background: white;
|
||||
box-shadow: 0 1px 2px rgba(0,0,0,0.2);
|
||||
}
|
||||
|
||||
/* VARIANTS TABLE */
|
||||
.variants-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.variants-table th {
|
||||
text-align: left;
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
color: var(--text-faint);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
padding: 10px 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.variants-table td {
|
||||
padding: 11px 20px;
|
||||
border-bottom: 1px solid #F0EDEA;
|
||||
vertical-align: middle;
|
||||
}
|
||||
.variants-table tr:last-child td { border-bottom: none; }
|
||||
.variants-table tr:hover td { background: #FAFAF9; }
|
||||
|
||||
.gene-name {
|
||||
font-weight: 600;
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
font-family: 'Sora', monospace;
|
||||
}
|
||||
.rsid {
|
||||
font-size: 11px;
|
||||
color: var(--text-faint);
|
||||
margin-top: 1px;
|
||||
}
|
||||
.finding-text {
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
}
|
||||
.genotype {
|
||||
font-family: 'Sora', monospace;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
background: #F4F1EE;
|
||||
padding: 2px 8px;
|
||||
border-radius: 4px;
|
||||
display: inline-block;
|
||||
}
|
||||
.sig-dot {
|
||||
width: 8px; height: 8px;
|
||||
border-radius: 50%;
|
||||
display: inline-block;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.sig-cell {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
white-space: nowrap;
|
||||
}
|
||||
.sig-dot.moderate { background: var(--amber); }
|
||||
.sig-dot.protective { background: var(--green); }
|
||||
.sig-dot.low { background: var(--text-faint); }
|
||||
.sig-dot.clear { background: #D4D0CB; }
|
||||
|
||||
.sig-label.moderate { color: var(--amber); }
|
||||
.sig-label.protective { color: var(--green); }
|
||||
.sig-label.low { color: var(--text-faint); }
|
||||
.sig-label.clear { color: var(--text-faint); }
|
||||
|
||||
/* EXPANDED FOOTER */
|
||||
.expanded-footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 12px 20px;
|
||||
border-top: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.footer-count {
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
.load-more {
|
||||
font-size: 12px;
|
||||
color: var(--amber);
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
text-decoration: none;
|
||||
}
|
||||
.load-more:hover { text-decoration: underline; }
|
||||
|
||||
/* AI CTA */
|
||||
.ai-cta {
|
||||
background: var(--nav-bg);
|
||||
border-radius: 12px;
|
||||
padding: 16px 20px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
}
|
||||
.ai-cta-text {
|
||||
flex: 1;
|
||||
font-size: 13px;
|
||||
color: #A8A5A2;
|
||||
line-height: 1.5;
|
||||
}
|
||||
.ai-cta-text strong { color: #FFFFFF; font-weight: 600; }
|
||||
.ai-cta-btn {
|
||||
background: var(--amber);
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 8px;
|
||||
padding: 9px 18px;
|
||||
font-family: 'Sora', sans-serif;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.ai-cta-btn:hover { background: #9A4507; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<!-- TOP NAV -->
|
||||
<div class="topbar">
|
||||
<div class="topbar-logo">inou<span>.</span></div>
|
||||
<div class="topbar-patient">
|
||||
<div>
|
||||
<div class="topbar-patient-name">Jane Doe</div>
|
||||
<div class="topbar-patient-dob">DOB Jan 1 2017 · Female</div>
|
||||
</div>
|
||||
<div class="avatar">JD</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="layout">
|
||||
|
||||
<!-- SIDEBAR -->
|
||||
<nav class="sidebar">
|
||||
<div class="nav-section-label">Overview</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Dashboard</a>
|
||||
|
||||
<div class="nav-section-label">Tests</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Labs</a>
|
||||
<a class="nav-item active" href="#"><span class="nav-dot active"></span>Genetics</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Imaging</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Assessments</a>
|
||||
|
||||
<div class="nav-section-label">Body</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Vitals</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Exercise</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Nutrition</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Sleep</a>
|
||||
|
||||
<div class="nav-section-label">Treatment</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Medications</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Supplements</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Therapy</a>
|
||||
|
||||
<div class="nav-section-label">History</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Diagnoses</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Symptoms</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Family History</a>
|
||||
|
||||
<div class="nav-section-label">Care Team</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Consultations</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Providers</a>
|
||||
|
||||
<div class="nav-section-label">Files</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Documents</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Uploads</a>
|
||||
</nav>
|
||||
|
||||
<!-- MAIN CONTENT -->
|
||||
<main class="main">
|
||||
|
||||
<!-- BREADCRUMB + SEARCH -->
|
||||
<div class="top-row">
|
||||
<div class="breadcrumb">
|
||||
Jane Doe <span class="breadcrumb-sep">›</span> Tests <span class="breadcrumb-sep">›</span> <span>Genetics</span>
|
||||
</div>
|
||||
<div class="search-box">
|
||||
<span class="search-icon">⌕</span>
|
||||
<input type="text" placeholder="Search gene or rsID…">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- STATS ROW -->
|
||||
<div class="stats-row">
|
||||
<div class="stat">
|
||||
<div class="stat-value">3,866</div>
|
||||
<div class="stat-label">Total variants</div>
|
||||
</div>
|
||||
<div class="stat-divider"></div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">12</div>
|
||||
<div class="stat-label">Categories</div>
|
||||
</div>
|
||||
<div class="stat-divider"></div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">597</div>
|
||||
<div class="stat-label">Hidden (no risk)</div>
|
||||
</div>
|
||||
<div class="hidden-note">Show hidden variants</div>
|
||||
</div>
|
||||
|
||||
<!-- TIER GRID -->
|
||||
<div class="tier-grid">
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Traits</div>
|
||||
<div class="tier-count">132</div>
|
||||
<div class="tier-hidden">49 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Longevity</div>
|
||||
<div class="tier-count">12</div>
|
||||
<div class="tier-hidden">1 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile active">
|
||||
<div class="tier-name">Metabolism</div>
|
||||
<div class="tier-count">97</div>
|
||||
<div class="tier-hidden">51 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Medications</div>
|
||||
<div class="tier-count">101</div>
|
||||
<div class="tier-hidden">26 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Mental Health</div>
|
||||
<div class="tier-count">63</div>
|
||||
<div class="tier-hidden">31 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Neurological</div>
|
||||
<div class="tier-count">91</div>
|
||||
<div class="tier-hidden">46 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Fertility</div>
|
||||
<div class="tier-count">12</div>
|
||||
<div class="tier-hidden">7 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Blood</div>
|
||||
<div class="tier-count">100</div>
|
||||
<div class="tier-hidden">12 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Cardiovascular</div>
|
||||
<div class="tier-count">104</div>
|
||||
<div class="tier-hidden">31 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Autoimmune</div>
|
||||
<div class="tier-count">80</div>
|
||||
<div class="tier-hidden">43 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile large">
|
||||
<div class="tier-name">Disease</div>
|
||||
<div class="tier-count">2,272</div>
|
||||
<div class="tier-hidden">233 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile large">
|
||||
<div class="tier-name">Cancer</div>
|
||||
<div class="tier-count">998</div>
|
||||
<div class="tier-hidden">67 hidden</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- EXPANDED: METABOLISM -->
|
||||
<div class="expanded-section">
|
||||
<div class="expanded-header">
|
||||
<div>
|
||||
<div class="expanded-title">Metabolism <span style="font-weight:400; color: var(--text-muted)">· 97 variants</span></div>
|
||||
<div class="expanded-subtitle">Sorted by significance</div>
|
||||
</div>
|
||||
<div class="toggle-hidden">
|
||||
<span>Show hidden</span>
|
||||
<div class="toggle-pill"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<table class="variants-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width:130px">Gene</th>
|
||||
<th>Finding</th>
|
||||
<th style="width:90px">Genotype</th>
|
||||
<th style="width:120px">Significance</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><div class="gene-name">MTHFR</div><div class="rsid">rs1801133</div></td>
|
||||
<td><div class="finding-text">10–20% folate processing efficiency</div></td>
|
||||
<td><span class="genotype">AA</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP2C19</div><div class="rsid">rs4244285</div></td>
|
||||
<td><div class="finding-text">Poorer metabolizer of several medicines</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">PPARG</div><div class="rsid">rs1801282</div></td>
|
||||
<td><div class="finding-text">Higher cardiovascular risk with high fat diet</div></td>
|
||||
<td><span class="genotype">CG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">TCF7L2</div><div class="rsid">rs7903146</div></td>
|
||||
<td><div class="finding-text">Increased type 2 diabetes risk</div></td>
|
||||
<td><span class="genotype">CT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">FTO</div><div class="rsid">rs9939609</div></td>
|
||||
<td><div class="finding-text">1.67× increased obesity risk</div></td>
|
||||
<td><span class="genotype">AT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">SLCO1B1</div><div class="rsid">rs4149056</div></td>
|
||||
<td><div class="finding-text">Increased statin-induced myopathy risk</div></td>
|
||||
<td><span class="genotype">CT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">APOA2</div><div class="rsid">rs5082</div></td>
|
||||
<td><div class="finding-text">Associated with higher HDL cholesterol</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot protective"></span><span class="sig-label protective">Protective</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP1A2</div><div class="rsid">rs762551</div></td>
|
||||
<td><div class="finding-text">Slow caffeine metabolizer</div></td>
|
||||
<td><span class="genotype">AC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP3A5</div><div class="rsid">rs776746</div></td>
|
||||
<td><div class="finding-text">Non-expressor — affects drug dosing</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">MCM6</div><div class="rsid">rs4988235</div></td>
|
||||
<td><div class="finding-text">Partial lactase persistence</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">APOE</div><div class="rsid">rs7412</div></td>
|
||||
<td><div class="finding-text">Normal lipid metabolism</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot clear"></span><span class="sig-label clear">Clear</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">GCK</div><div class="rsid">rs1799884</div></td>
|
||||
<td><div class="finding-text">Slightly reduced glucose sensing</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div class="expanded-footer">
|
||||
<div class="footer-count">Showing 12 of 97 variants</div>
|
||||
<a class="load-more" href="#">Load more</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- AI CTA -->
|
||||
<div class="ai-cta">
|
||||
<div class="ai-cta-text">
|
||||
<strong>Your AI has access to all 3,866 variants</strong>, including hidden ones. Ask it to reason across your metabolism, medication sensitivities, and disease risk together.
|
||||
</div>
|
||||
<button class="ai-cta-btn">Ask Claude about your genetics →</button>
|
||||
</div>
|
||||
|
||||
</main>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 810 KiB After Width: | Height: | Size: 680 KiB |
|
|
@ -4,6 +4,7 @@ import (
|
|||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html"
|
||||
"io"
|
||||
"net/http"
|
||||
"sort"
|
||||
|
|
@ -189,7 +190,7 @@ func fetchStudiesWithSeries(dossierHex string) ([]Study, error) {
|
|||
for _, ser := range apiSeries {
|
||||
s.Series = append(s.Series, Series{
|
||||
ID: ser.ID,
|
||||
Description: ser.SeriesDesc,
|
||||
Description: html.EscapeString(ser.SeriesDesc), // FIX TASK-019: XSS prevention
|
||||
Modality: ser.Modality,
|
||||
SliceCount: ser.SliceCount,
|
||||
})
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import (
|
|||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"inou/lib"
|
||||
)
|
||||
|
|
@ -185,6 +186,12 @@ type APIDossierEntry struct {
|
|||
Name string `json:"name"`
|
||||
Relation string `json:"relation"`
|
||||
CanAdd bool `json:"can_add"`
|
||||
Initials string `json:"initials"`
|
||||
Color string `json:"color"`
|
||||
Age string `json:"age"` // e.g. "8y", "42y", "" if unknown
|
||||
DOB string `json:"dob"` // YYYY-MM-DD or ""
|
||||
Sex int `json:"sex"` // 0=unknown, 1=male, 2=female
|
||||
IsSelf bool `json:"is_self"`
|
||||
}
|
||||
|
||||
type APIDashboardResponse struct {
|
||||
|
|
@ -206,9 +213,15 @@ func handleAPIDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
// Add self first
|
||||
dossiers = append(dossiers, APIDossierEntry{
|
||||
GUID: formatHexID(d.DossierID),
|
||||
Name: "", // Empty means "self"
|
||||
Name: d.Name,
|
||||
Relation: "self",
|
||||
CanAdd: true,
|
||||
Initials: apiInitials(d.Name),
|
||||
Color: apiColor(d.DossierID),
|
||||
Age: apiAge(d.DateOfBirth),
|
||||
DOB: apiDOB(d.DateOfBirth),
|
||||
Sex: d.Sex,
|
||||
IsSelf: true,
|
||||
})
|
||||
|
||||
// Add others
|
||||
|
|
@ -223,8 +236,13 @@ func handleAPIDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
dossiers = append(dossiers, APIDossierEntry{
|
||||
GUID: formatHexID(a.DossierID),
|
||||
Name: target.Name,
|
||||
Relation: "other", // Relation removed from RBAC
|
||||
Relation: relationName(a.Relation),
|
||||
CanAdd: (a.Ops & lib.PermWrite) != 0,
|
||||
Initials: apiInitials(target.Name),
|
||||
Color: apiColor(a.DossierID),
|
||||
Age: apiAge(target.DateOfBirth),
|
||||
DOB: apiDOB(target.DateOfBirth),
|
||||
Sex: target.Sex,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -306,3 +324,41 @@ func relationName(rel int) string {
|
|||
default: return "other"
|
||||
}
|
||||
}
|
||||
|
||||
func apiInitials(name string) string {
|
||||
parts := strings.Fields(name)
|
||||
if len(parts) == 0 { return "?" }
|
||||
first := []rune(parts[0])
|
||||
if len(parts) == 1 {
|
||||
if len(first) == 0 { return "?" }
|
||||
return strings.ToUpper(string(first[:1]))
|
||||
}
|
||||
last := []rune(parts[len(parts)-1])
|
||||
if len(first) == 0 || len(last) == 0 { return "?" }
|
||||
return strings.ToUpper(string(first[:1]) + string(last[:1]))
|
||||
}
|
||||
|
||||
var apiAvatarColors = []string{
|
||||
"#C47A3D", "#5AAD8A", "#7E8FC2", "#C26E6E",
|
||||
"#9A82B8", "#C2963D", "#6BA0B8", "#B87898",
|
||||
"#7DAD6B", "#8B7D6B",
|
||||
}
|
||||
|
||||
func apiColor(id string) string {
|
||||
if len(id) < 2 { return apiAvatarColors[0] }
|
||||
b, _ := strconv.ParseUint(id[len(id)-2:], 16, 8)
|
||||
return apiAvatarColors[b%uint64(len(apiAvatarColors))]
|
||||
}
|
||||
|
||||
func apiAge(dob string) string {
|
||||
if len(dob) < 10 { return "" }
|
||||
t, err := time.Parse("2006-01-02", dob[:10])
|
||||
if err != nil { return "" }
|
||||
years := int(time.Since(t).Hours() / 8766)
|
||||
return fmt.Sprintf("%dy", years)
|
||||
}
|
||||
|
||||
func apiDOB(dob string) string {
|
||||
if len(dob) < 10 { return "" }
|
||||
return dob[:10]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -255,3 +255,42 @@ func normalizeQuery(next http.HandlerFunc) http.HandlerFunc {
|
|||
next(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
// httpsRedirectMiddleware redirects HTTP requests to HTTPS
|
||||
// Respects X-Forwarded-Proto header for deployments behind reverse proxy
|
||||
func httpsRedirectMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Skip redirect for health checks and local development
|
||||
if r.URL.Path == "/api/v1/health" || isLocalIP(getIP(r)) {
|
||||
next.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if request is already HTTPS (direct or via proxy)
|
||||
isHTTPS := r.TLS != nil || r.Header.Get("X-Forwarded-Proto") == "https"
|
||||
|
||||
if !isHTTPS {
|
||||
// Build HTTPS URL
|
||||
target := "https://" + r.Host + r.URL.RequestURI()
|
||||
http.Redirect(w, r, target, http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
// hstsMiddleware adds HTTP Strict Transport Security headers
|
||||
func hstsMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Only add HSTS header for HTTPS requests
|
||||
isHTTPS := r.TLS != nil || r.Header.Get("X-Forwarded-Proto") == "https"
|
||||
|
||||
if isHTTPS {
|
||||
// max-age=1 year (31536000 seconds), include subdomains, preload ready
|
||||
w.Header().Set("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload")
|
||||
}
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1084,6 +1084,107 @@ func handleDossierV2(w http.ResponseWriter, r *http.Request) {
|
|||
})
|
||||
}
|
||||
|
||||
// handleLabCommentary generates AI commentary for lab trend charts.
|
||||
// POST /dossier/{id}/labs/commentary
|
||||
// Body: {"series":[{"name":"Cholesterol","abbr":"CHOL","unit":"mg/dL","points":[{"date":"2024-01-15","val":210},...],"refLow":0,"refHigh":200,"direction":"lower_better"}]}
|
||||
// Returns: {"commentary":{"CHOL":"Cholesterol dropped 15% from 210 to 180 mg/dL since January — now within normal range.",...}}
|
||||
func handleLabCommentary(w http.ResponseWriter, r *http.Request) {
|
||||
p := getLoggedInDossier(r)
|
||||
if p == nil { http.Error(w, "Unauthorized", http.StatusUnauthorized); return }
|
||||
if r.Method != "POST" { http.Error(w, "Method not allowed", 405); return }
|
||||
if lib.AnthropicKey == "" {
|
||||
http.Error(w, "AI commentary unavailable", 503)
|
||||
return
|
||||
}
|
||||
|
||||
var body struct {
|
||||
Series []struct {
|
||||
Name string `json:"name"`
|
||||
Abbr string `json:"abbr"`
|
||||
Unit string `json:"unit"`
|
||||
Direction string `json:"direction"`
|
||||
RefLow float64 `json:"refLow"`
|
||||
RefHigh float64 `json:"refHigh"`
|
||||
Points []struct {
|
||||
Date string `json:"date"`
|
||||
Val float64 `json:"val"`
|
||||
} `json:"points"`
|
||||
} `json:"series"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&body); err != nil || len(body.Series) == 0 {
|
||||
http.Error(w, "Invalid request", 400)
|
||||
return
|
||||
}
|
||||
|
||||
// Build compact prompt
|
||||
var sb strings.Builder
|
||||
sb.WriteString("You are a medical data analyst. For each lab metric below, write ONE concise sentence (max 20 words) describing the trend for a patient. Focus on: direction (up/down/stable), magnitude (% or absolute change), and whether it's moving toward or away from the normal range. Use plain language, no jargon. Do NOT give medical advice or diagnoses.\n\n")
|
||||
sb.WriteString("Format your response as JSON: {\"ABBR\": \"sentence\", ...}\n\n")
|
||||
sb.WriteString("Metrics:\n")
|
||||
for _, s := range body.Series {
|
||||
if len(s.Points) < 2 { continue }
|
||||
first := s.Points[0]
|
||||
last := s.Points[len(s.Points)-1]
|
||||
pct := 0.0
|
||||
if first.Val != 0 { pct = (last.Val - first.Val) / first.Val * 100 }
|
||||
var refStr string
|
||||
if s.RefHigh > 0 || s.RefLow > 0 {
|
||||
refStr = fmt.Sprintf(", normal range: %.1f–%.1f %s", s.RefLow, s.RefHigh, s.Unit)
|
||||
}
|
||||
sb.WriteString(fmt.Sprintf("- %s (%s): %.1f→%.1f %s (%+.0f%%) from %s to %s%s\n",
|
||||
s.Name, s.Abbr, first.Val, last.Val, s.Unit, pct, first.Date, last.Date, refStr))
|
||||
}
|
||||
|
||||
reqBody, _ := json.Marshal(map[string]interface{}{
|
||||
"model": "claude-haiku-4-5",
|
||||
"max_tokens": 512,
|
||||
"messages": []map[string]interface{}{
|
||||
{"role": "user", "content": sb.String()},
|
||||
},
|
||||
})
|
||||
|
||||
req, err := http.NewRequest("POST", "https://api.anthropic.com/v1/messages", strings.NewReader(string(reqBody)))
|
||||
if err != nil { http.Error(w, "Request error", 500); return }
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("x-api-key", lib.AnthropicKey)
|
||||
req.Header.Set("anthropic-version", "2023-06-01")
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil { http.Error(w, "API error", 502); return }
|
||||
defer resp.Body.Close()
|
||||
|
||||
var result struct {
|
||||
Content []struct{ Text string `json:"text"` } `json:"content"`
|
||||
Error struct{ Message string `json:"message"` } `json:"error"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil || len(result.Content) == 0 {
|
||||
http.Error(w, "Bad API response", 502)
|
||||
return
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
http.Error(w, "API error: "+result.Error.Message, 502)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse the JSON response from Claude (it may be wrapped in markdown code fence)
|
||||
raw := strings.TrimSpace(result.Content[0].Text)
|
||||
raw = strings.TrimPrefix(raw, "```json")
|
||||
raw = strings.TrimPrefix(raw, "```")
|
||||
raw = strings.TrimSuffix(raw, "```")
|
||||
raw = strings.TrimSpace(raw)
|
||||
|
||||
var commentary map[string]string
|
||||
if err := json.Unmarshal([]byte(raw), &commentary); err != nil {
|
||||
// Return the raw text as a fallback under "_raw"
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]interface{}{"commentary": map[string]string{"_raw": raw}})
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]interface{}{"commentary": commentary})
|
||||
}
|
||||
|
||||
// handleLabSearch serves lab data as JSON.
|
||||
// GET /dossier/{id}/labs?order={entryID} — children for one order (expand)
|
||||
// GET /dossier/{id}/labs?q=sodium — search across all orders
|
||||
|
|
@ -1182,18 +1283,17 @@ func handleLabSearch(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
// Build search index: term → []loinc
|
||||
var tests []lib.LabTest
|
||||
lib.RefQuery("SELECT loinc_id, name FROM lab_test", nil, &tests)
|
||||
var loincEntries []lib.LoincInfo
|
||||
lib.RefQuery("SELECT loinc_num, long_name, short_name, component, system, property FROM loinc_lab", nil, &loincEntries)
|
||||
searchIndex := make(map[string][]string)
|
||||
for _, test := range tests {
|
||||
name := strings.ToLower(test.Name)
|
||||
if !contains(searchIndex[name], test.LoincID) {
|
||||
searchIndex[name] = append(searchIndex[name], test.LoincID)
|
||||
}
|
||||
for _, word := range strings.Fields(name) {
|
||||
word = strings.Trim(word, "()")
|
||||
if len(word) >= 3 && !contains(searchIndex[word], test.LoincID) {
|
||||
searchIndex[word] = append(searchIndex[word], test.LoincID)
|
||||
for _, l := range loincEntries {
|
||||
// Index by long_name words and component words
|
||||
for _, src := range []string{l.LongName, l.Component} {
|
||||
for _, word := range strings.Fields(strings.ToLower(src)) {
|
||||
word = strings.Trim(word, "()[].,/")
|
||||
if len(word) >= 3 && !contains(searchIndex[word], l.Code) {
|
||||
searchIndex[word] = append(searchIndex[word], l.Code)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1262,10 +1362,12 @@ func handleLabSearch(w http.ResponseWriter, r *http.Request) {
|
|||
matchedOrders = append(matchedOrders, oj)
|
||||
}
|
||||
|
||||
// LOINC name map
|
||||
// LOINC name map — use official long_name from loinc_lab
|
||||
loincNameMap := make(map[string]string)
|
||||
for _, t := range tests {
|
||||
if matchLoincs[t.LoincID] { loincNameMap[t.LoincID] = t.Name }
|
||||
for _, l := range loincEntries {
|
||||
if matchLoincs[l.Code] {
|
||||
loincNameMap[l.Code] = l.LongName
|
||||
}
|
||||
}
|
||||
if matchedOrders == nil { matchedOrders = []orderJSON{} }
|
||||
|
||||
|
|
|
|||
|
|
@ -347,6 +347,7 @@ func processGenomeUpload(uploadID string, dossierID string, filePath string) {
|
|||
|
||||
parentEntry := &lib.Entry{
|
||||
DossierID: dossierID,
|
||||
ParentID: dossierID,
|
||||
Category: lib.CategoryGenome,
|
||||
Type: "extraction",
|
||||
Value: format,
|
||||
|
|
|
|||
|
|
@ -398,20 +398,98 @@ func isMinor(dob string) bool {
|
|||
return age < minAge
|
||||
}
|
||||
func getLoggedInDossier(r *http.Request) *lib.Dossier {
|
||||
cookie, err := r.Cookie("login")
|
||||
if err != nil || cookie.Value == "" { return nil }
|
||||
d, err := lib.DossierGet(cookie.Value, cookie.Value)
|
||||
dossierID := getSessionDossier(r)
|
||||
if dossierID == "" {
|
||||
// Check for legacy login cookie (migration path)
|
||||
if cookie, err := r.Cookie("login"); err == nil && cookie.Value != "" {
|
||||
dossierID = cookie.Value
|
||||
}
|
||||
}
|
||||
if dossierID == "" { return nil }
|
||||
d, err := lib.DossierGet(dossierID, dossierID)
|
||||
if err != nil { return nil }
|
||||
return d
|
||||
}
|
||||
|
||||
|
||||
func setLoginCookie(w http.ResponseWriter, dossierID string) {
|
||||
http.SetCookie(w, &http.Cookie{Name: "login", Value: dossierID, Path: "/", MaxAge: 30*24*60*60, HttpOnly: true, Secure: true, SameSite: http.SameSiteLaxMode})
|
||||
// Session management with secure tokens
|
||||
var sessionCache = make(map[string]string) // token -> dossierID
|
||||
|
||||
func setLoginCookie(w http.ResponseWriter, dossierID string) string {
|
||||
// Generate new session token (session fixation protection)
|
||||
token := generateSessionToken()
|
||||
sessionCache[token] = dossierID
|
||||
|
||||
// Store in auth DB for persistence across restarts
|
||||
lib.SessionCreate(token, dossierID, 30*24*60*60) // 30 days
|
||||
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: "session",
|
||||
Value: token,
|
||||
Path: "/",
|
||||
MaxAge: 30 * 24 * 60 * 60,
|
||||
HttpOnly: true,
|
||||
Secure: true,
|
||||
SameSite: http.SameSiteLaxMode,
|
||||
})
|
||||
// Set cache-control headers to prevent session token caching
|
||||
w.Header().Set("Cache-Control", "no-store, private, no-cache, must-revalidate")
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("Expires", "0")
|
||||
|
||||
return token
|
||||
}
|
||||
|
||||
func clearLoginCookie(w http.ResponseWriter) {
|
||||
http.SetCookie(w, &http.Cookie{Name: "login", Value: "", Path: "/", MaxAge: -1, HttpOnly: true, Secure: true})
|
||||
func clearLoginCookie(w http.ResponseWriter, r *http.Request) {
|
||||
// Server-side session invalidation
|
||||
if cookie, err := r.Cookie("session"); err == nil && cookie.Value != "" {
|
||||
delete(sessionCache, cookie.Value)
|
||||
lib.SessionDelete(cookie.Value)
|
||||
}
|
||||
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: "session",
|
||||
Value: "",
|
||||
Path: "/",
|
||||
MaxAge: -1,
|
||||
HttpOnly: true,
|
||||
Secure: true,
|
||||
SameSite: http.SameSiteLaxMode,
|
||||
})
|
||||
|
||||
// Also clear legacy login cookie
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: "login",
|
||||
Value: "",
|
||||
Path: "/",
|
||||
MaxAge: -1,
|
||||
HttpOnly: true,
|
||||
Secure: true,
|
||||
})
|
||||
|
||||
w.Header().Set("Cache-Control", "no-store, private, no-cache, must-revalidate")
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("Expires", "0")
|
||||
}
|
||||
|
||||
// getSessionDossier validates session token and returns dossierID
|
||||
func getSessionDossier(r *http.Request) string {
|
||||
cookie, err := r.Cookie("session")
|
||||
if err != nil || cookie.Value == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Check memory cache first
|
||||
if dossierID, ok := sessionCache[cookie.Value]; ok {
|
||||
return dossierID
|
||||
}
|
||||
|
||||
// Fall back to DB
|
||||
dossierID := lib.SessionGetDossier(cookie.Value)
|
||||
if dossierID != "" {
|
||||
sessionCache[cookie.Value] = dossierID
|
||||
}
|
||||
return dossierID
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -659,7 +737,7 @@ func handleOnboard(w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
|
||||
func handleLogout(w http.ResponseWriter, r *http.Request) {
|
||||
clearLoginCookie(w)
|
||||
clearLoginCookie(w, r)
|
||||
http.Redirect(w, r, "/", http.StatusSeeOther)
|
||||
}
|
||||
|
||||
|
|
@ -2154,6 +2232,7 @@ func setupMux() http.Handler {
|
|||
} else if strings.Contains(path, "/files/") && strings.HasSuffix(path, "/reprocess") { handleReprocess(w, r)
|
||||
} else if strings.HasSuffix(path, "/process-imaging") { handleProcessImaging(w, r)
|
||||
} else if strings.HasSuffix(path, "/process-status") { handleProcessStatus(w, r)
|
||||
} else if strings.HasSuffix(path, "/labs/commentary") { handleLabCommentary(w, r)
|
||||
} else if strings.HasSuffix(path, "/labs") { handleLabSearch(w, r)
|
||||
} else if strings.Contains(path, "/document/") { handleDocumentView(w, r)
|
||||
} else { handleDossierV2(w, r) }
|
||||
|
|
@ -2186,7 +2265,7 @@ func setupMux() http.Handler {
|
|||
// MCP HTTP server (for Anthropic Connectors Directory)
|
||||
RegisterMCPRoutes(mux)
|
||||
|
||||
return defense(notifyOn404(logMiddleware(mux)))
|
||||
return hstsMiddleware(httpsRedirectMiddleware(defense(notifyOn404(logMiddleware(mux)))))
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
|
|
|||
|
|
@ -195,13 +195,12 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
// Check authorization
|
||||
var dossierID, accessToken string
|
||||
var dossierID string
|
||||
|
||||
// Local LAN backdoor — skip OAuth for agents on 192.168.0.0/22
|
||||
remoteIP := strings.Split(r.RemoteAddr, ":")[0]
|
||||
if strings.HasPrefix(remoteIP, "192.168.0.") || strings.HasPrefix(remoteIP, "192.168.1.") || strings.HasPrefix(remoteIP, "192.168.2.") || strings.HasPrefix(remoteIP, "192.168.3.") {
|
||||
dossierID = "6e4e8192881a7494"
|
||||
accessToken = ""
|
||||
fmt.Printf("[MCP] Local gateway access as dossier: %s\n", dossierID)
|
||||
} else {
|
||||
auth := r.Header.Get("Authorization")
|
||||
|
|
@ -243,7 +242,6 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
dossierID = token.DossierID
|
||||
accessToken = tokenStr
|
||||
}
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
|
|
@ -267,7 +265,7 @@ func handleMCP(w http.ResponseWriter, r *http.Request) {
|
|||
case "tools/list":
|
||||
handleMCPToolsList(w, req)
|
||||
case "tools/call":
|
||||
handleMCPToolsCall(w, req, accessToken, dossierID)
|
||||
handleMCPToolsCall(w, req, dossierID)
|
||||
case "prompts/list":
|
||||
sendMCPResult(w, req.ID, map[string]interface{}{"prompts": []interface{}{}})
|
||||
case "prompts/get":
|
||||
|
|
@ -309,17 +307,13 @@ func handleMCPInitialize(w http.ResponseWriter, req mcpRequest) {
|
|||
},
|
||||
"instructions": "inou gives you access to a patient's raw health data — imaging, labs, genome, vitals, and more. " +
|
||||
"Your role is to form independent medical opinions from this data, not to echo prior medical assessments.\n\n" +
|
||||
"This is by design: radiology readings, lab reference ranges, and other professional interpretations have been " +
|
||||
"intentionally excluded. You are the diagnostician. Study the raw images, the raw lab values, the raw genomic " +
|
||||
"variants, and form your own conclusions.\n\n" +
|
||||
"Medical opinion categories (diagnoses, consultation notes, assessments, imaging reports) unlock after you've queried " +
|
||||
"all available raw data categories for the patient. A top-level listing counts — you don't need to review every entry, " +
|
||||
"just see what's there and use your judgement whether you need to dive deeper to answer your user's question.\n\n" +
|
||||
"Radiology reports, physician notes, and other professional interpretations do not exist in this system. " +
|
||||
"Do not ask for them or suggest they might be available. You are the diagnostician. " +
|
||||
"Study the raw images, the raw lab values, the raw genomic variants, and form your own conclusions.\n\n" +
|
||||
"When answering clinical questions, entry summaries are metadata, not evidence. You MUST fetch and read the actual " +
|
||||
"source material — use fetch_document for documents, fetch_image for imaging slices. " +
|
||||
"Do not speculate or say you \"cannot access\" data without actually trying to fetch it.\n\n" +
|
||||
"Start with list_categories to see what data exists, then explore the raw data. " +
|
||||
"Cheating is possible but hurts your user — anchoring on prior opinions defeats the purpose of independent analysis.",
|
||||
"Start with list_categories to see what data exists, then explore the raw data.",
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -421,12 +415,31 @@ func handleMCPToolsList(w http.ResponseWriter, req mcpRequest) {
|
|||
"inputSchema": map[string]interface{}{"type": "object", "properties": map[string]interface{}{}},
|
||||
"annotations": readOnly,
|
||||
},
|
||||
{
|
||||
"name": "create_entry",
|
||||
"title": "Create Entry",
|
||||
"description": "Write a new health entry directly into a dossier. Use for saving AI-generated insights, supplement notes, nutrition logs, or any health observation. Requires write permission on the dossier. Category must be one of the values returned by list_categories (e.g. 'supplement', 'nutrition', 'tracker'). Type is a sub-type within the category (e.g. 'vitamin', 'meal', 'note'). Value is a short human-readable label. Summary is a one-line description. Data is optional structured JSON for additional fields.",
|
||||
"inputSchema": map[string]interface{}{
|
||||
"type": "object",
|
||||
"properties": map[string]interface{}{
|
||||
"dossier": map[string]interface{}{"type": "string", "description": "Dossier ID (16-char hex)"},
|
||||
"category": map[string]interface{}{"type": "string", "description": "Category name (e.g. 'supplement', 'nutrition', 'tracker')"},
|
||||
"type": map[string]interface{}{"type": "string", "description": "Entry type within category (e.g. 'vitamin', 'meal', 'note')"},
|
||||
"value": map[string]interface{}{"type": "string", "description": "Short label or title for the entry"},
|
||||
"summary": map[string]interface{}{"type": "string", "description": "One-line description or AI-generated insight"},
|
||||
"data": map[string]interface{}{"type": "string", "description": "Optional JSON string with additional structured fields"},
|
||||
"parent": map[string]interface{}{"type": "string", "description": "Optional parent entry ID for hierarchical entries"},
|
||||
"timestamp": map[string]interface{}{"type": "number", "description": "Optional Unix timestamp (defaults to now)"},
|
||||
},
|
||||
"required": []string{"dossier", "category", "value"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
sendMCPResult(w, req.ID, map[string]interface{}{"tools": tools})
|
||||
}
|
||||
|
||||
func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, dossierID string) {
|
||||
func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, dossierID string) {
|
||||
var params struct {
|
||||
Name string `json:"name"`
|
||||
Arguments map[string]interface{} `json:"arguments"`
|
||||
|
|
@ -436,9 +449,6 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
|||
return
|
||||
}
|
||||
|
||||
// dossierID = authenticated user's ID (used for RBAC in all lib calls)
|
||||
// accessToken = forwarded to API for image/journal calls (API enforces RBAC)
|
||||
|
||||
switch params.Name {
|
||||
case "list_dossiers":
|
||||
result, err := mcpListDossiers(dossierID)
|
||||
|
|
@ -471,10 +481,14 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
|||
typ, _ := params.Arguments["type"].(string)
|
||||
searchKey, _ := params.Arguments["search_key"].(string)
|
||||
parent, _ := params.Arguments["parent"].(string)
|
||||
if parent == "" {
|
||||
if parent == "" && searchKey == "" {
|
||||
sendMCPResult(w, req.ID, mcpTextContent("ERROR: parent is required. Start with parent="+dossier+" (the dossier ID) to list top-level entries, then use returned entry IDs to navigate deeper."))
|
||||
return
|
||||
}
|
||||
// When parent is the dossier ID, search all top-level entries (ParentID="" or ParentID=dossierID)
|
||||
if parent == dossier {
|
||||
parent = ""
|
||||
}
|
||||
from, _ := params.Arguments["from"].(string)
|
||||
to, _ := params.Arguments["to"].(string)
|
||||
limit, _ := params.Arguments["limit"].(float64)
|
||||
|
|
@ -494,7 +508,7 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
|||
}
|
||||
wc, _ := params.Arguments["wc"].(float64)
|
||||
ww, _ := params.Arguments["ww"].(float64)
|
||||
result, err := mcpFetchImage(accessToken, dossier, slice, wc, ww)
|
||||
result, err := mcpFetchImage(dossierID, dossier, slice, wc, ww)
|
||||
if err != nil {
|
||||
sendMCPError(w, req.ID, -32000, err.Error())
|
||||
return
|
||||
|
|
@ -510,7 +524,7 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
|||
}
|
||||
wc, _ := params.Arguments["wc"].(float64)
|
||||
ww, _ := params.Arguments["ww"].(float64)
|
||||
result, err := mcpFetchContactSheet(accessToken, dossier, series, wc, ww)
|
||||
result, err := mcpFetchContactSheet(dossierID, dossier, series, wc, ww)
|
||||
if err != nil {
|
||||
sendMCPError(w, req.ID, -32000, err.Error())
|
||||
return
|
||||
|
|
@ -535,6 +549,30 @@ func handleMCPToolsCall(w http.ResponseWriter, req mcpRequest, accessToken, doss
|
|||
case "get_version":
|
||||
sendMCPResult(w, req.ID, mcpTextContent(fmt.Sprintf("Server: %s v%s", mcpServerName, mcpServerVersion)))
|
||||
|
||||
case "create_entry":
|
||||
dossier, _ := params.Arguments["dossier"].(string)
|
||||
category, _ := params.Arguments["category"].(string)
|
||||
typ, _ := params.Arguments["type"].(string)
|
||||
value, _ := params.Arguments["value"].(string)
|
||||
summary, _ := params.Arguments["summary"].(string)
|
||||
data, _ := params.Arguments["data"].(string)
|
||||
parent, _ := params.Arguments["parent"].(string)
|
||||
var ts int64
|
||||
if tsRaw, ok := params.Arguments["timestamp"]; ok {
|
||||
switch v := tsRaw.(type) {
|
||||
case float64:
|
||||
ts = int64(v)
|
||||
case int64:
|
||||
ts = v
|
||||
}
|
||||
}
|
||||
result, err := mcpCreateEntry(dossierID, dossier, category, typ, value, summary, data, parent, ts)
|
||||
if err != nil {
|
||||
sendMCPError(w, req.ID, -32000, err.Error())
|
||||
return
|
||||
}
|
||||
sendMCPResult(w, req.ID, mcpTextContent(result))
|
||||
|
||||
default:
|
||||
sendMCPError(w, req.ID, -32601, "Unknown tool: "+params.Name)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,52 +4,16 @@ import (
|
|||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"inou/lib"
|
||||
)
|
||||
|
||||
// MCP Tool Implementations
|
||||
// Data queries go through lib directly with RBAC enforcement.
|
||||
// Image rendering goes through the API (which also enforces RBAC via lib).
|
||||
|
||||
const apiBaseURL = "http://localhost:8082" // Internal API server (images only)
|
||||
|
||||
// mcpAPIGet calls the internal API with Bearer auth.
|
||||
func mcpAPIGet(accessToken, path string, params map[string]string) ([]byte, error) {
|
||||
v := url.Values{}
|
||||
for k, val := range params {
|
||||
if val != "" {
|
||||
v.Set(k, val)
|
||||
}
|
||||
}
|
||||
u := apiBaseURL + path
|
||||
if len(v) > 0 {
|
||||
u += "?" + v.Encode()
|
||||
}
|
||||
req, err := http.NewRequest("GET", u, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("Authorization", "Bearer "+accessToken)
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body))
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
// All tools go through lib directly with RBAC enforcement.
|
||||
|
||||
// --- Data query tools: all go through lib with RBAC ---
|
||||
|
||||
|
|
@ -139,6 +103,12 @@ func mcpGetCategories(dossier, accessorID string) (string, error) {
|
|||
// formatEntries converts entries to the standard MCP response format.
|
||||
func formatEntries(entries []*lib.Entry) string {
|
||||
var result []map[string]any
|
||||
|
||||
// Lazy-load dossier info for lab reference lookups
|
||||
var dossierSex string
|
||||
var dossierDOB int64
|
||||
var dossierLoaded bool
|
||||
|
||||
for _, e := range entries {
|
||||
entry := map[string]any{
|
||||
"id": e.EntryID,
|
||||
|
|
@ -150,12 +120,65 @@ func formatEntries(entries []*lib.Entry) string {
|
|||
"ordinal": e.Ordinal,
|
||||
"timestamp": e.Timestamp,
|
||||
}
|
||||
if e.SearchKey != "" {
|
||||
entry["search_key"] = e.SearchKey
|
||||
}
|
||||
if e.Data != "" {
|
||||
var d map[string]any
|
||||
if json.Unmarshal([]byte(e.Data), &d) == nil {
|
||||
entry["data"] = d
|
||||
}
|
||||
}
|
||||
// Enrich lab result entries with test name, reference range, and flag
|
||||
if e.Category == lib.CategoryLab && e.SearchKey != "" {
|
||||
test, err := lib.LabTestGet(e.SearchKey)
|
||||
if err == nil && test != nil {
|
||||
entry["test_name"] = test.Name
|
||||
|
||||
// Load dossier sex/DOB once
|
||||
if !dossierLoaded {
|
||||
dossierLoaded = true
|
||||
if d, err := lib.DossierGet("", e.DossierID); err == nil && d != nil {
|
||||
switch d.Sex {
|
||||
case 1:
|
||||
dossierSex = "M"
|
||||
case 2:
|
||||
dossierSex = "F"
|
||||
}
|
||||
dossierDOB = d.DOB.Unix()
|
||||
}
|
||||
}
|
||||
|
||||
// Look up reference range for this test at the patient's age at time of lab
|
||||
ts := e.Timestamp
|
||||
if ts == 0 {
|
||||
ts = int64(e.Ordinal) // fallback
|
||||
}
|
||||
if dossierDOB > 0 && ts > 0 {
|
||||
ageDays := lib.AgeDays(dossierDOB, ts)
|
||||
if ref, err := lib.LabRefLookup(e.SearchKey, dossierSex, ageDays); err == nil && ref != nil {
|
||||
siFactor := float64(test.SIFactor) / lib.LabScale
|
||||
if siFactor > 0 {
|
||||
low := lib.FromLabScale(ref.RefLow) / siFactor
|
||||
high := lib.FromLabScale(ref.RefHigh) / siFactor
|
||||
entry["ref_low"] = low
|
||||
entry["ref_high"] = high
|
||||
|
||||
// Compute flag from numeric value
|
||||
if numVal, ok := entry["value"].(string); ok {
|
||||
if v, err := strconv.ParseFloat(numVal, 64); err == nil {
|
||||
if ref.RefLow >= 0 && v < low {
|
||||
entry["flag"] = "L"
|
||||
} else if ref.RefHigh >= 0 && v > high {
|
||||
entry["flag"] = "H"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
switch e.Type {
|
||||
case "root":
|
||||
entry["hint"] = "Use list_entries with parent=" + e.EntryID + " to list studies"
|
||||
|
|
@ -170,40 +193,23 @@ func formatEntries(entries []*lib.Entry) string {
|
|||
return string(pretty)
|
||||
}
|
||||
|
||||
// --- Image tools: RBAC via lib, then API for rendering ---
|
||||
// --- Image tools: direct through lib ---
|
||||
|
||||
func mcpFetchImage(accessToken, dossier, slice string, wc, ww float64) (map[string]interface{}, error) {
|
||||
params := map[string]string{}
|
||||
if wc != 0 {
|
||||
params["wc"] = strconv.FormatFloat(wc, 'f', 0, 64)
|
||||
}
|
||||
if ww != 0 {
|
||||
params["ww"] = strconv.FormatFloat(ww, 'f', 0, 64)
|
||||
}
|
||||
|
||||
body, err := mcpAPIGet(accessToken, "/image/"+slice, params)
|
||||
func mcpFetchImage(accessorID, dossier, slice string, wc, ww float64) (map[string]interface{}, error) {
|
||||
opts := &lib.ImageOpts{WC: wc, WW: ww}
|
||||
body, err := lib.RenderImage(accessorID, slice, opts, 2000)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b64 := base64.StdEncoding.EncodeToString(body)
|
||||
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Slice %s (%d bytes)", slice[:8], len(body))), nil
|
||||
}
|
||||
|
||||
func mcpFetchContactSheet(accessToken, dossier, series string, wc, ww float64) (map[string]interface{}, error) {
|
||||
params := map[string]string{}
|
||||
if wc != 0 {
|
||||
params["wc"] = strconv.FormatFloat(wc, 'f', 0, 64)
|
||||
}
|
||||
if ww != 0 {
|
||||
params["ww"] = strconv.FormatFloat(ww, 'f', 0, 64)
|
||||
}
|
||||
|
||||
body, err := mcpAPIGet(accessToken, "/contact-sheet.webp/"+series, params)
|
||||
func mcpFetchContactSheet(accessorID, dossier, series string, wc, ww float64) (map[string]interface{}, error) {
|
||||
body, err := lib.RenderContactSheet(accessorID, series, wc, ww)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b64 := base64.StdEncoding.EncodeToString(body)
|
||||
return mcpImageContent(b64, "image/webp", fmt.Sprintf("Contact sheet %s (%d bytes)", series[:8], len(body))), nil
|
||||
}
|
||||
|
|
@ -367,3 +373,56 @@ func docToTranslation(e *lib.Entry, data map[string]interface{}) (string, error)
|
|||
}
|
||||
return result.Content[0].Text, nil
|
||||
}
|
||||
|
||||
// mcpCreateEntry writes a new entry to the specified dossier.
|
||||
// accessorID is the MCP session's dossier ID (the authenticated user).
|
||||
// dossier is the target dossier (may differ for shared family dossiers).
|
||||
func mcpCreateEntry(accessorID, dossier, category, typ, value, summary, data, parent string, ts int64) (string, error) {
|
||||
if dossier == "" {
|
||||
return "", fmt.Errorf("dossier required")
|
||||
}
|
||||
if value == "" {
|
||||
return "", fmt.Errorf("value required")
|
||||
}
|
||||
|
||||
// Resolve category int
|
||||
catInt, ok := lib.CategoryFromString[category]
|
||||
if !ok {
|
||||
// Build a helpful error listing valid categories
|
||||
var valid []string
|
||||
for k := range lib.CategoryFromString {
|
||||
valid = append(valid, k)
|
||||
}
|
||||
return "", fmt.Errorf("unknown category %q — valid: %s", category, strings.Join(valid, ", "))
|
||||
}
|
||||
|
||||
// Validate data JSON if provided
|
||||
if data != "" {
|
||||
var probe interface{}
|
||||
if err := json.Unmarshal([]byte(data), &probe); err != nil {
|
||||
return "", fmt.Errorf("data must be valid JSON: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
if ts == 0 {
|
||||
ts = time.Now().Unix()
|
||||
}
|
||||
|
||||
e := &lib.Entry{
|
||||
DossierID: dossier,
|
||||
Category: catInt,
|
||||
Type: typ,
|
||||
Value: value,
|
||||
Summary: summary,
|
||||
Data: data,
|
||||
ParentID: parent,
|
||||
Timestamp: ts,
|
||||
}
|
||||
|
||||
if err := lib.EntryWrite(accessorID, e); err != nil {
|
||||
return "", fmt.Errorf("write failed: %v", err)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("Entry created: id=%s category=%s type=%s value=%q timestamp=%d",
|
||||
e.EntryID, category, typ, value, ts), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"inou/lib"
|
||||
)
|
||||
|
||||
// CreateChatGPTClient creates the OAuth client for ChatGPT Custom GPT Actions.
|
||||
// Call this once during setup — see docs/chatgpt-actions-setup.md.
|
||||
func CreateChatGPTClient() error {
|
||||
_, err := lib.OAuthClientGet("chatgpt")
|
||||
if err == nil {
|
||||
return nil // Already exists
|
||||
}
|
||||
|
||||
redirectURIs := []string{
|
||||
"https://chat.openai.com/aip/g-*/oauth/callback",
|
||||
"https://chatgpt.com/aip/g-*/oauth/callback",
|
||||
}
|
||||
|
||||
client, secret, err := lib.OAuthClientCreate("ChatGPT Actions", redirectURIs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Printf("Created ChatGPT OAuth client:\n")
|
||||
fmt.Printf(" Client ID: %s\n", client.ClientID)
|
||||
fmt.Printf(" Client Secret: %s\n", secret)
|
||||
fmt.Printf(" (Save the secret - it cannot be retrieved later)\n")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
/* Clavitor logo treatments — all use Sora from inou's font stack */
|
||||
|
||||
:root {
|
||||
--clavitor-amber: #B45309;
|
||||
--clavitor-dark: #1A1A1A;
|
||||
--clavitor-light: #F8F7F6;
|
||||
}
|
||||
|
||||
/* 1. Default — confident, tight, bold */
|
||||
.clavitor-logo {
|
||||
font-family: 'Sora', system-ui, sans-serif;
|
||||
font-weight: 700;
|
||||
letter-spacing: -0.03em;
|
||||
color: var(--clavitor-amber);
|
||||
}
|
||||
|
||||
/* 2. Editorial — airy, spaced, light */
|
||||
.clavitor-logo.editorial {
|
||||
font-weight: 300;
|
||||
letter-spacing: 0.08em;
|
||||
text-transform: lowercase;
|
||||
color: var(--clavitor-dark);
|
||||
}
|
||||
|
||||
/* 3. Heavy impact — extra bold, letter-spaced */
|
||||
.clavitor-logo.heavy {
|
||||
font-weight: 800;
|
||||
letter-spacing: 0.02em;
|
||||
text-transform: uppercase;
|
||||
color: var(--clavitor-dark);
|
||||
}
|
||||
|
||||
/* 4. Inverted — white on dark */
|
||||
.clavitor-logo.inverted {
|
||||
font-weight: 700;
|
||||
letter-spacing: -0.02em;
|
||||
color: white;
|
||||
text-shadow: 0 1px 3px rgba(0,0,0,0.3);
|
||||
}
|
||||
|
||||
/* 5. Gradient sweep */
|
||||
.clavitor-logo.gradient {
|
||||
font-weight: 700;
|
||||
letter-spacing: -0.02em;
|
||||
background: linear-gradient(90deg, var(--clavitor-amber), #FF8D46);
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
background-clip: text;
|
||||
}
|
||||
|
||||
/* 6. Weight-shifted — letters get heavier toward the end */
|
||||
.clavitor-logo.shifted {
|
||||
font-weight: 400;
|
||||
letter-spacing: -0.02em;
|
||||
}
|
||||
.clavitor-logo.shifted span:last-child { font-weight: 800; }
|
||||
|
||||
/* 7. Small-caps with bold first letter */
|
||||
.clavitor-logo.smallcaps {
|
||||
font-variant: small-caps;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.05em;
|
||||
text-transform: lowercase;
|
||||
}
|
||||
.clavitor-logo.smallcaps::first-letter {
|
||||
font-weight: 800;
|
||||
font-variant: normal;
|
||||
}
|
||||
|
||||
/* 8. Minimal — light, tight, subtle */
|
||||
.clavitor-logo.minimal {
|
||||
font-weight: 300;
|
||||
letter-spacing: -0.04em;
|
||||
opacity: 0.85;
|
||||
color: var(--clavitor-dark);
|
||||
}
|
||||
|
||||
/* 9. Hover lift (for links/buttons) */
|
||||
.clavitor-logo.interactive {
|
||||
font-weight: 700;
|
||||
letter-spacing: -0.03em;
|
||||
color: var(--clavitor-amber);
|
||||
transition: transform 0.15s ease, color 0.15s ease;
|
||||
display: inline-block;
|
||||
}
|
||||
.clavitor-logo.interactive:hover {
|
||||
transform: translateY(-1px);
|
||||
color: #9A4507;
|
||||
}
|
||||
|
|
@ -1263,9 +1263,27 @@ a:hover {
|
|||
.lab-chart {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
margin-bottom: 12px;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
.lab-chart:last-child { margin-bottom: 0; }
|
||||
.lab-chart-wrap {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
.lab-chart-wrap:last-child { margin-bottom: 0; }
|
||||
.lab-commentary {
|
||||
min-height: 20px;
|
||||
padding: 0 4px 8px 4px;
|
||||
}
|
||||
.lab-commentary-loading {
|
||||
color: var(--text-muted, #9ca3af);
|
||||
font-size: 12px;
|
||||
animation: pulse 1.5s ease-in-out infinite;
|
||||
}
|
||||
.lab-commentary-text {
|
||||
font-size: 13px;
|
||||
color: var(--text-secondary, #6b7280);
|
||||
font-style: italic;
|
||||
}
|
||||
@keyframes pulse { 0%,100% { opacity: 0.4; } 50% { opacity: 1; } }
|
||||
|
||||
/* Expandable children */
|
||||
.data-row-children {
|
||||
|
|
|
|||
|
|
@ -432,6 +432,17 @@ async function init() {
|
|||
}
|
||||
}
|
||||
|
||||
// HTML entity encoder to prevent XSS
|
||||
function escapeHtml(text) {
|
||||
if (!text) return '';
|
||||
return text
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, ''');
|
||||
}
|
||||
|
||||
async function addPanelEmpty() {
|
||||
const idx = panelCount++;
|
||||
const panel = { idx, seriesId: null, slices: [], currentSlice: 0 };
|
||||
|
|
@ -444,11 +455,11 @@ async function addPanelEmpty() {
|
|||
// Series header: show dropdown only if multiple series
|
||||
let headerContent;
|
||||
if (seriesList.length === 1) {
|
||||
headerContent = '<span class="series-name">' + seriesList[0].series_desc + ' (' + seriesList[0].slice_count + ')</span>';
|
||||
headerContent = '<span class="series-name">' + escapeHtml(seriesList[0].series_desc) + ' (' + seriesList[0].slice_count + ')</span>';
|
||||
} else {
|
||||
headerContent = '<select onchange="loadSeries(' + idx + ', this.value)">' +
|
||||
'<option value="">Select series...</option>' +
|
||||
seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
||||
seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||
'</select>';
|
||||
}
|
||||
|
||||
|
|
@ -645,7 +656,7 @@ async function add3DPanel(idx, orientation, seriesOptions) {
|
|||
'<span style="color:#B45309;margin-right:10px;font-weight:bold">' + orientation + '</span>' +
|
||||
'<select onchange="loadSeries(' + idx + ', this.value)">' +
|
||||
'<option value="">Select ' + orientation + ' series...</option>' +
|
||||
(seriesOptions || []).map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
||||
(seriesOptions || []).map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||
'</select></div>' +
|
||||
'<div class="panel-content"><div class="img-wrapper">' +
|
||||
'<img>' +
|
||||
|
|
@ -737,6 +748,58 @@ async function setPanels(count) {
|
|||
}
|
||||
}
|
||||
|
||||
// Compare mode: 2-panel split with current + prior study, same orientation,
|
||||
// sync scroll enabled. If only one study exists, loads same study in both panels.
|
||||
async function compareStudies() {
|
||||
if (studies.length < 1) return;
|
||||
|
||||
is3DMode = false;
|
||||
document.getElementById('studySelect3d').style.display = 'none';
|
||||
document.getElementById('panels').innerHTML = '';
|
||||
panels = [];
|
||||
panelCount = 0;
|
||||
|
||||
const studyA = studies[0]; // most recent
|
||||
const studyB = studies.length > 1 ? studies[1] : studyA; // prior (or same)
|
||||
|
||||
// Load both panels
|
||||
await addPanel();
|
||||
await addPanel();
|
||||
|
||||
// Load study A into panel 0
|
||||
currentStudyId = studyA.id;
|
||||
await changeStudyForPanel(0, studyA.id);
|
||||
|
||||
// Load study B into panel 1
|
||||
await changeStudyForPanel(1, studyB.id);
|
||||
|
||||
// After both are loaded, try to match orientation: prefer AX, then SAG, then COR
|
||||
const preferOri = ['AX', 'SAG', 'COR'];
|
||||
for (const ori of preferOri) {
|
||||
const sA = (panels[0].seriesList || []).find(s => s.orientation === ori || (s.series_desc || '').toUpperCase().startsWith(ori));
|
||||
const sB = (panels[1].seriesList || []).find(s => s.orientation === ori || (s.series_desc || '').toUpperCase().startsWith(ori));
|
||||
if (sA && sB) {
|
||||
const selA = document.getElementById('panel-0').querySelector('.series-select');
|
||||
const selB = document.getElementById('panel-1').querySelector('.series-select');
|
||||
if (selA) selA.value = sA.id;
|
||||
if (selB) selB.value = sB.id;
|
||||
await loadSeries(0, sA.id);
|
||||
await loadSeries(1, sB.id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Enable sync scroll
|
||||
const syncEl = document.getElementById('syncScroll');
|
||||
if (syncEl) syncEl.checked = true;
|
||||
|
||||
// Sync both panels to middle slice
|
||||
if (panels[0] && panels[0].slices.length) {
|
||||
const mid = Math.floor(panels[0].slices.length / 2);
|
||||
goToSlice(0, mid);
|
||||
}
|
||||
}
|
||||
|
||||
async function changeStudyForPanel(panelIdx, studyId) {
|
||||
currentStudyId = studyId;
|
||||
const panel = panels[panelIdx];
|
||||
|
|
@ -758,7 +821,7 @@ async function changeStudyForPanel(panelIdx, studyId) {
|
|||
const seriesSelect = div.querySelector('.series-select');
|
||||
if (seriesSelect) {
|
||||
seriesSelect.innerHTML = '<option value="">Select series...</option>' +
|
||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('');
|
||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('');
|
||||
}
|
||||
|
||||
// Auto-select best series
|
||||
|
|
@ -917,7 +980,7 @@ async function addPanel() {
|
|||
'</select>' +
|
||||
'<select class="series-select" onchange="loadSeries(' + idx + ', this.value)">' +
|
||||
'<option value="">Select series...</option>' +
|
||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + s.series_desc + ' (' + s.slice_count + ')</option>').join('') +
|
||||
panel.seriesList.map(s => '<option value="' + s.id + '">' + escapeHtml(s.series_desc) + ' (' + s.slice_count + ')</option>').join('') +
|
||||
'</select>';
|
||||
|
||||
div.innerHTML =
|
||||
|
|
|
|||
|
|
@ -585,9 +585,56 @@ function renderFilterChart(card, table, q) {
|
|||
// Build display name: "Full Name (Abbr)" or fallback to abbreviation
|
||||
const fullName = loincNames[loinc] || s.abbr;
|
||||
const displayName = fullName !== s.abbr ? `${fullName} (${s.abbr})` : s.abbr;
|
||||
html += `<div class="lab-chart-wrap" data-abbr="${esc(s.abbr)}">`;
|
||||
html += buildSVGChart(displayName, s.unit, s.points, s.abbr, globalTMin, globalTMax);
|
||||
html += `<div class="lab-commentary" data-abbr="${esc(s.abbr)}"><span class="lab-commentary-loading">…</span></div>`;
|
||||
html += `</div>`;
|
||||
}
|
||||
body.innerHTML = html;
|
||||
|
||||
// Fetch AI commentary asynchronously — don't block chart render
|
||||
fetchLabCommentary(chartable, body);
|
||||
}
|
||||
|
||||
async function fetchLabCommentary(chartable, body) {
|
||||
const series = [];
|
||||
for (const [loinc, s] of chartable) {
|
||||
if (s.points.length < 2) continue;
|
||||
const ref = labRefData[s.abbr] || {};
|
||||
series.push({
|
||||
name: loincNames[loinc] || s.abbr,
|
||||
abbr: s.abbr,
|
||||
unit: s.unit,
|
||||
direction: ref.direction || '',
|
||||
refLow: ref.refLow || 0,
|
||||
refHigh: ref.refHigh || 0,
|
||||
points: s.points.map(p => ({
|
||||
date: p.date.toISOString().slice(0,10),
|
||||
val: p.val,
|
||||
})),
|
||||
});
|
||||
}
|
||||
if (series.length === 0) return;
|
||||
|
||||
try {
|
||||
const resp = await fetch(`/dossier/${dossierGUID}/labs/commentary`, {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify({series}),
|
||||
});
|
||||
if (!resp.ok) return;
|
||||
const data = await resp.json();
|
||||
const commentary = data.commentary || {};
|
||||
for (const [abbr, text] of Object.entries(commentary)) {
|
||||
if (abbr === '_raw') continue;
|
||||
const el = body.querySelector(`.lab-commentary[data-abbr="${abbr}"]`);
|
||||
if (el) el.innerHTML = `<span class="lab-commentary-text">💡 ${esc(text)}</span>`;
|
||||
}
|
||||
// Clear any remaining loading spinners
|
||||
body.querySelectorAll('.lab-commentary-loading').forEach(el => el.remove());
|
||||
} catch (_) {
|
||||
body.querySelectorAll('.lab-commentary-loading').forEach(el => el.remove());
|
||||
}
|
||||
}
|
||||
|
||||
function buildSVGChart(name, unit, points, abbr, globalTMin, globalTMax) {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,499 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"inou/lib"
|
||||
)
|
||||
|
||||
const refDBPath = "/tank/inou/data/reference.db"
|
||||
|
||||
var refDB *sql.DB
|
||||
|
||||
func main() {
|
||||
if len(os.Args) < 2 {
|
||||
fmt.Fprintln(os.Stderr, `Usage:
|
||||
loinc-lookup import <loinc_lab.csv> Import LOINC lab table into reference.db
|
||||
loinc-lookup <name> [specimen] [unit] Look up LOINC code for a lab test
|
||||
loinc-lookup batch <file.jsonl> Batch lookup from JSONL (one {"name","specimen","unit"} per line)
|
||||
loinc-lookup stats Show cache statistics`)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
lib.ConfigInit()
|
||||
if err := lib.RefDBInit(refDBPath); err != nil {
|
||||
log.Fatalf("RefDBInit: %v", err)
|
||||
}
|
||||
var err error
|
||||
refDB, err = sql.Open("sqlite3", refDBPath)
|
||||
if err != nil {
|
||||
log.Fatalf("open reference.db: %v", err)
|
||||
}
|
||||
defer refDB.Close()
|
||||
|
||||
switch os.Args[1] {
|
||||
case "import":
|
||||
if len(os.Args) < 3 {
|
||||
log.Fatal("Usage: loinc-lookup import <loinc_lab.csv>")
|
||||
}
|
||||
cmdImport(os.Args[2])
|
||||
case "stats":
|
||||
cmdStats()
|
||||
case "batch":
|
||||
if len(os.Args) < 3 {
|
||||
log.Fatal("Usage: loinc-lookup batch <file.jsonl>")
|
||||
}
|
||||
cmdBatch(os.Args[2])
|
||||
default:
|
||||
name := os.Args[1]
|
||||
specimen := ""
|
||||
unit := ""
|
||||
if len(os.Args) > 2 {
|
||||
specimen = os.Args[2]
|
||||
}
|
||||
if len(os.Args) > 3 {
|
||||
unit = os.Args[3]
|
||||
}
|
||||
cmdLookup(name, specimen, unit)
|
||||
}
|
||||
}
|
||||
|
||||
// --- import command ---
|
||||
|
||||
func cmdImport(csvPath string) {
|
||||
// Create tables
|
||||
for _, stmt := range []string{
|
||||
`CREATE TABLE IF NOT EXISTS loinc_lab (
|
||||
loinc_num TEXT PRIMARY KEY,
|
||||
component TEXT NOT NULL,
|
||||
property TEXT NOT NULL,
|
||||
system TEXT NOT NULL,
|
||||
scale TEXT NOT NULL,
|
||||
method TEXT NOT NULL,
|
||||
class TEXT NOT NULL,
|
||||
long_name TEXT NOT NULL,
|
||||
short_name TEXT NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS loinc_cache (
|
||||
cache_key TEXT PRIMARY KEY,
|
||||
input_name TEXT NOT NULL,
|
||||
input_specimen TEXT NOT NULL,
|
||||
input_unit TEXT NOT NULL,
|
||||
loinc_code TEXT NOT NULL,
|
||||
loinc_name TEXT NOT NULL,
|
||||
confidence TEXT NOT NULL DEFAULT 'llm'
|
||||
)`,
|
||||
} {
|
||||
if _, err := refDB.Exec(stmt); err != nil {
|
||||
log.Fatalf("create table: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
f, err := os.Open(csvPath)
|
||||
if err != nil {
|
||||
log.Fatalf("open %s: %v", csvPath, err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
reader := csv.NewReader(f)
|
||||
header, err := reader.Read()
|
||||
if err != nil {
|
||||
log.Fatalf("read header: %v", err)
|
||||
}
|
||||
|
||||
// Map column names to indices
|
||||
colIdx := map[string]int{}
|
||||
for i, h := range header {
|
||||
colIdx[h] = i
|
||||
}
|
||||
need := []string{"LOINC_NUM", "COMPONENT", "PROPERTY", "SYSTEM", "SCALE_TYP", "METHOD_TYP", "CLASS", "LONG_COMMON_NAME", "SHORTNAME"}
|
||||
for _, n := range need {
|
||||
if _, ok := colIdx[n]; !ok {
|
||||
log.Fatalf("missing column: %s", n)
|
||||
}
|
||||
}
|
||||
|
||||
// Clear and re-import
|
||||
refDB.Exec("DELETE FROM loinc_lab")
|
||||
|
||||
tx, err := refDB.Begin()
|
||||
if err != nil {
|
||||
log.Fatalf("begin tx: %v", err)
|
||||
}
|
||||
stmt, err := tx.Prepare(`INSERT INTO loinc_lab (loinc_num, component, property, system, scale, method, class, long_name, short_name)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`)
|
||||
if err != nil {
|
||||
log.Fatalf("prepare: %v", err)
|
||||
}
|
||||
|
||||
count := 0
|
||||
for {
|
||||
row, err := reader.Read()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
stmt.Exec(
|
||||
row[colIdx["LOINC_NUM"]],
|
||||
row[colIdx["COMPONENT"]],
|
||||
row[colIdx["PROPERTY"]],
|
||||
row[colIdx["SYSTEM"]],
|
||||
row[colIdx["SCALE_TYP"]],
|
||||
row[colIdx["METHOD_TYP"]],
|
||||
row[colIdx["CLASS"]],
|
||||
row[colIdx["LONG_COMMON_NAME"]],
|
||||
row[colIdx["SHORTNAME"]],
|
||||
)
|
||||
count++
|
||||
}
|
||||
stmt.Close()
|
||||
if err := tx.Commit(); err != nil {
|
||||
log.Fatalf("commit: %v", err)
|
||||
}
|
||||
log.Printf("Imported %d LOINC lab codes", count)
|
||||
}
|
||||
|
||||
// --- lookup command ---
|
||||
|
||||
func cmdLookup(name, specimen, unit string) {
|
||||
result, err := loincLookup(name, specimen, unit)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
out, _ := json.MarshalIndent(result, "", " ")
|
||||
fmt.Println(string(out))
|
||||
}
|
||||
|
||||
type LookupResult struct {
|
||||
LoincCode string `json:"loinc_code"`
|
||||
LoincName string `json:"loinc_name"`
|
||||
Source string `json:"source"` // "cache" or "llm"
|
||||
Candidates int `json:"candidates"`
|
||||
}
|
||||
|
||||
func loincLookup(name, specimen, unit string) (*LookupResult, error) {
|
||||
// 1. Check cache
|
||||
cacheKey := strings.ToLower(name + "|" + specimen + "|" + unit)
|
||||
var cached []struct {
|
||||
LoincCode string `db:"loinc_code"`
|
||||
LoincName string `db:"loinc_name"`
|
||||
}
|
||||
lib.RefQuery("SELECT loinc_code, loinc_name FROM loinc_cache WHERE cache_key = ?", []any{cacheKey}, &cached)
|
||||
if len(cached) > 0 {
|
||||
return &LookupResult{
|
||||
LoincCode: cached[0].LoincCode,
|
||||
LoincName: cached[0].LoincName,
|
||||
Source: "cache",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// 2. Expand input to LOINC terminology via LLM, then search
|
||||
// Replace "%" with "percentage" so LLM connects to LOINC's "/100 leukocytes" naming
|
||||
lookupUnit := unit
|
||||
if lookupUnit == "%" {
|
||||
lookupUnit = "percentage"
|
||||
}
|
||||
tokens := tokenize(name + " " + specimen + " " + lookupUnit)
|
||||
if expanded, err := llmExpand(name, specimen, lookupUnit); err == nil {
|
||||
tokens = expanded
|
||||
}
|
||||
candidates, _ := searchCandidates(tokens)
|
||||
|
||||
// If unit is %, drop candidates that are counts (#/volume, NCnc)
|
||||
if unit == "%" {
|
||||
var filtered []candidate
|
||||
for _, c := range candidates {
|
||||
if c.Property == "NCnc" {
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, c)
|
||||
}
|
||||
if len(filtered) > 0 {
|
||||
candidates = filtered
|
||||
}
|
||||
}
|
||||
|
||||
if len(candidates) == 0 {
|
||||
return nil, fmt.Errorf("no LOINC candidates found for %q", name)
|
||||
}
|
||||
|
||||
// 4. LLM pick from candidates
|
||||
code, lname, err := llmPick(name, specimen, lookupUnit, candidates)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// 5. Cache
|
||||
refDB.Exec(`INSERT OR REPLACE INTO loinc_cache (cache_key, input_name, input_specimen, input_unit, loinc_code, loinc_name, confidence)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 'llm')`, cacheKey, name, specimen, unit, code, lname)
|
||||
|
||||
return &LookupResult{
|
||||
LoincCode: code,
|
||||
LoincName: lname,
|
||||
Source: "llm",
|
||||
Candidates: len(candidates),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func tokenize(s string) []string {
|
||||
s = strings.ToLower(s)
|
||||
// Replace common separators with spaces
|
||||
for _, c := range []string{",", ";", "(", ")", "[", "]", "/", "-", ".", ":"} {
|
||||
s = strings.ReplaceAll(s, c, " ")
|
||||
}
|
||||
var tokens []string
|
||||
seen := map[string]bool{}
|
||||
for _, t := range strings.Fields(s) {
|
||||
if len(t) < 2 || seen[t] {
|
||||
continue
|
||||
}
|
||||
tokens = append(tokens, t)
|
||||
seen[t] = true
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
type candidate struct {
|
||||
LoincNum string `db:"loinc_num"`
|
||||
LongName string `db:"long_name"`
|
||||
ShortName string `db:"short_name"`
|
||||
System string `db:"system"`
|
||||
Component string `db:"component"`
|
||||
Property string `db:"property"`
|
||||
}
|
||||
|
||||
func searchCandidates(tokens []string) ([]candidate, int) {
|
||||
if len(tokens) == 0 {
|
||||
return nil, 0
|
||||
}
|
||||
|
||||
// Query per token, collect into a map keyed by loinc_num
|
||||
type entry struct {
|
||||
c candidate
|
||||
hits int // number of distinct tokens that matched
|
||||
bonus int // extra score for quality of match
|
||||
}
|
||||
entries := map[string]*entry{}
|
||||
|
||||
for _, t := range tokens {
|
||||
pattern := "%" + t + "%"
|
||||
query := "SELECT loinc_num, long_name, short_name, system, component, property FROM loinc_lab WHERE " +
|
||||
"LOWER(long_name) LIKE ? OR LOWER(short_name) LIKE ? OR LOWER(component) LIKE ?"
|
||||
var results []candidate
|
||||
lib.RefQuery(query, []any{pattern, pattern, pattern}, &results)
|
||||
for _, c := range results {
|
||||
if e, ok := entries[c.LoincNum]; ok {
|
||||
e.hits++
|
||||
} else {
|
||||
entries[c.LoincNum] = &entry{c: c, hits: 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Require at least 2 token matches (or 1 if only 1 token)
|
||||
minHits := 2
|
||||
if len(tokens) <= 1 {
|
||||
minHits = 1
|
||||
}
|
||||
|
||||
// Score: hits × 100 + bonus for component exactness (shorter component = more specific)
|
||||
type scored struct {
|
||||
c candidate
|
||||
score int
|
||||
}
|
||||
var scoredResults []scored
|
||||
for _, e := range entries {
|
||||
if e.hits < minHits {
|
||||
continue
|
||||
}
|
||||
s := e.hits * 100
|
||||
// Bonus: prefer entries where component is a simple term, not a compound like "Carboxyhemoglobin/Hemoglobin.total"
|
||||
compLen := len(e.c.Component)
|
||||
if compLen > 0 && compLen < 50 {
|
||||
s += 50 - compLen // shorter component = higher bonus
|
||||
}
|
||||
// Bonus: prefer entries without "/" in component (simple analytes)
|
||||
if !strings.Contains(e.c.Component, "/") {
|
||||
s += 20
|
||||
}
|
||||
scoredResults = append(scoredResults, scored{e.c, s})
|
||||
}
|
||||
|
||||
// Sort by score descending, take top 30
|
||||
for i := range scoredResults {
|
||||
for j := i + 1; j < len(scoredResults); j++ {
|
||||
if scoredResults[j].score > scoredResults[i].score {
|
||||
scoredResults[i], scoredResults[j] = scoredResults[j], scoredResults[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
var top []candidate
|
||||
maxHits := 0
|
||||
for i, s := range scoredResults {
|
||||
if i >= 30 {
|
||||
break
|
||||
}
|
||||
top = append(top, s.c)
|
||||
hits := s.score / 100 // extract hit count from score
|
||||
if hits > maxHits {
|
||||
maxHits = hits
|
||||
}
|
||||
}
|
||||
return top, maxHits
|
||||
}
|
||||
|
||||
func llmExpand(name, specimen, unit string) ([]string, error) {
|
||||
prompt := fmt.Sprintf(`Given a lab test, return search terms to find it in the LOINC database.
|
||||
LOINC uses formal medical terminology (e.g. "Leukocytes" not "White Blood Cells", "Erythrocytes" not "Red Blood Cells", "Oxygen" not "O2" or "pO2").
|
||||
|
||||
Lab test:
|
||||
Name: %s
|
||||
Specimen: %s
|
||||
Unit: %s
|
||||
|
||||
Return a JSON object: {"terms": ["term1", "term2", ...]}
|
||||
Include: the LOINC component name, specimen system code (e.g. Bld, BldA, BldC, BldV, Ser/Plas, Urine), and any synonyms that might appear in LOINC long names.
|
||||
Keep it to 3-6 terms. JSON only.`, name, specimen, unit)
|
||||
|
||||
resp, err := lib.CallGemini(prompt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Terms []string `json:"terms"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||
return nil, fmt.Errorf("parse expand response %q: %w", resp, err)
|
||||
}
|
||||
|
||||
// Lowercase all terms and add original input tokens as fallback
|
||||
var terms []string
|
||||
seen := map[string]bool{}
|
||||
for _, t := range result.Terms {
|
||||
t = strings.ToLower(strings.TrimSpace(t))
|
||||
if t != "" && !seen[t] {
|
||||
terms = append(terms, t)
|
||||
seen[t] = true
|
||||
}
|
||||
}
|
||||
// Also include original tokens so we never lose the raw input
|
||||
for _, t := range tokenize(name + " " + specimen) {
|
||||
if !seen[t] {
|
||||
terms = append(terms, t)
|
||||
seen[t] = true
|
||||
}
|
||||
}
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
func llmPick(name, specimen, unit string, candidates []candidate) (string, string, error) {
|
||||
// Format candidates as a numbered list
|
||||
// Replace "/100" and "fraction" with "percentage" so LLM connects them to "%" unit
|
||||
var lines []string
|
||||
for i, c := range candidates {
|
||||
display := c.LongName
|
||||
display = strings.ReplaceAll(display, "/100 ", "percentage of ")
|
||||
display = strings.ReplaceAll(display, "fraction", "percentage")
|
||||
lines = append(lines, fmt.Sprintf("%d. %s — %s [System: %s]", i+1, c.LoincNum, display, c.System))
|
||||
}
|
||||
|
||||
prompt := fmt.Sprintf(`You are a clinical laboratory informatics system. Given a lab test, pick the BEST matching LOINC code from the candidate list.
|
||||
|
||||
Lab test:
|
||||
Name: %s
|
||||
Specimen: %s
|
||||
Unit: %s
|
||||
|
||||
Candidates:
|
||||
%s
|
||||
|
||||
Return ONLY a JSON object: {"pick": <number>, "loinc": "<code>", "name": "<long name>"}
|
||||
Pick the candidate that best matches the test name, specimen type, and unit. If none match well, pick the closest.
|
||||
JSON only, no explanation.`, name, specimen, unit, strings.Join(lines, "\n"))
|
||||
|
||||
resp, err := lib.CallGemini(prompt)
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("LLM call failed: %w", err)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Pick int `json:"pick"`
|
||||
Loinc string `json:"loinc"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(resp), &result); err != nil {
|
||||
return "", "", fmt.Errorf("parse LLM response %q: %w", resp, err)
|
||||
}
|
||||
|
||||
// Resolve by pick number if loinc field is empty
|
||||
if result.Loinc == "" && result.Pick > 0 && result.Pick <= len(candidates) {
|
||||
result.Loinc = candidates[result.Pick-1].LoincNum
|
||||
result.Name = candidates[result.Pick-1].LongName
|
||||
}
|
||||
// Verify the code is actually in our candidate list
|
||||
for _, c := range candidates {
|
||||
if c.LoincNum == result.Loinc {
|
||||
return result.Loinc, c.LongName, nil
|
||||
}
|
||||
}
|
||||
// Pick number as fallback
|
||||
if result.Pick > 0 && result.Pick <= len(candidates) {
|
||||
c := candidates[result.Pick-1]
|
||||
return c.LoincNum, c.LongName, nil
|
||||
}
|
||||
return "", "", fmt.Errorf("LLM returned %q (pick %d) — not in %d candidates", result.Loinc, result.Pick, len(candidates))
|
||||
}
|
||||
|
||||
// --- batch command ---
|
||||
|
||||
func cmdBatch(path string) {
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
log.Fatalf("read %s: %v", path, err)
|
||||
}
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
var input struct {
|
||||
Name string `json:"name"`
|
||||
Specimen string `json:"specimen"`
|
||||
Unit string `json:"unit"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(line), &input); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "skip bad line: %s\n", line)
|
||||
continue
|
||||
}
|
||||
result, err := loincLookup(input.Name, input.Specimen, input.Unit)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "%s: %v\n", input.Name, err)
|
||||
continue
|
||||
}
|
||||
fmt.Printf("%-40s → %s %s [%s]\n", input.Name, result.LoincCode, result.LoincName, result.Source)
|
||||
}
|
||||
}
|
||||
|
||||
// --- stats command ---
|
||||
|
||||
func cmdStats() {
|
||||
var total []struct{ N int `db:"n"` }
|
||||
lib.RefQuery("SELECT COUNT(*) as n FROM loinc_lab", nil, &total)
|
||||
if len(total) > 0 {
|
||||
fmt.Printf("LOINC lab codes: %d\n", total[0].N)
|
||||
}
|
||||
|
||||
var cached []struct{ N int `db:"n"` }
|
||||
lib.RefQuery("SELECT COUNT(*) as n FROM loinc_cache", nil, &cached)
|
||||
if len(cached) > 0 {
|
||||
fmt.Printf("Cached lookups: %d\n", cached[0].N)
|
||||
}
|
||||
}
|
||||
|
|
@ -389,6 +389,7 @@ func handleViewer(w http.ResponseWriter, r *http.Request) {
|
|||
<button id="btn2panels" onclick="setPanels(2)">2 Panels</button>
|
||||
<button id="btn3panels" onclick="setPanels(3)">3 Panels</button>
|
||||
<button id="btn3d" onclick="set3DMode()">3D</button>
|
||||
<button id="btnCompare" onclick="compareStudies()" title="Compare current vs prior study side-by-side with sync scroll">Compare</button>
|
||||
</div>
|
||||
<label id="syncLabel" class="sync-label"><input type="checkbox" id="syncScroll" checked><span>Sync</span></label>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,720 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>inou — Genetics</title>
|
||||
<style>
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Regular.ttf'); font-weight: 400; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Light.ttf'); font-weight: 300; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-SemiBold.ttf'); font-weight: 600; }
|
||||
@font-face { font-family: 'Sora'; src: url('/assets/fonts/Sora-Bold.ttf'); font-weight: 700; }
|
||||
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
|
||||
:root {
|
||||
--amber: #B45309;
|
||||
--amber-light: #FEF3C7;
|
||||
--amber-mid: #F59E0B;
|
||||
--bg: #F8F7F6;
|
||||
--surface: #FFFFFF;
|
||||
--border: #E5E3E0;
|
||||
--text: #1A1A1A;
|
||||
--text-muted: #6B6968;
|
||||
--text-faint: #A8A5A2;
|
||||
--green: #15803D;
|
||||
--green-light: #DCFCE7;
|
||||
--nav-bg: #1C1917;
|
||||
--nav-text: #D6D3D1;
|
||||
--nav-active: #FFFFFF;
|
||||
--sidebar-w: 220px;
|
||||
--topbar-h: 52px;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Sora', system-ui, sans-serif;
|
||||
background: var(--bg);
|
||||
color: var(--text);
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* TOP NAV */
|
||||
.topbar {
|
||||
height: var(--topbar-h);
|
||||
background: var(--nav-bg);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 0 20px;
|
||||
gap: 16px;
|
||||
flex-shrink: 0;
|
||||
border-bottom: 1px solid #2C2A28;
|
||||
}
|
||||
.topbar-logo {
|
||||
font-weight: 700;
|
||||
font-size: 16px;
|
||||
color: #FFFFFF;
|
||||
letter-spacing: -0.3px;
|
||||
}
|
||||
.topbar-logo span { color: var(--amber); }
|
||||
.topbar-patient {
|
||||
margin-left: auto;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
}
|
||||
.topbar-patient-name {
|
||||
font-size: 13px;
|
||||
color: var(--nav-text);
|
||||
font-weight: 600;
|
||||
}
|
||||
.topbar-patient-dob {
|
||||
font-size: 12px;
|
||||
color: #78716C;
|
||||
}
|
||||
.avatar {
|
||||
width: 30px; height: 30px;
|
||||
border-radius: 50%;
|
||||
background: var(--amber);
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
font-size: 12px; font-weight: 700; color: white;
|
||||
}
|
||||
|
||||
/* LAYOUT */
|
||||
.layout {
|
||||
display: flex;
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* SIDEBAR */
|
||||
.sidebar {
|
||||
width: var(--sidebar-w);
|
||||
background: var(--nav-bg);
|
||||
flex-shrink: 0;
|
||||
overflow-y: auto;
|
||||
padding: 12px 0;
|
||||
border-right: 1px solid #2C2A28;
|
||||
}
|
||||
.nav-section-label {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
color: #57534E;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.8px;
|
||||
padding: 12px 16px 4px;
|
||||
}
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 7px 16px;
|
||||
font-size: 13px;
|
||||
color: var(--nav-text);
|
||||
cursor: pointer;
|
||||
border-radius: 0;
|
||||
text-decoration: none;
|
||||
}
|
||||
.nav-item:hover { background: #292524; }
|
||||
.nav-item.active {
|
||||
color: var(--nav-active);
|
||||
background: #292524;
|
||||
font-weight: 600;
|
||||
}
|
||||
.nav-item.active::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: 0;
|
||||
width: 3px;
|
||||
height: 28px;
|
||||
background: var(--amber);
|
||||
border-radius: 0 2px 2px 0;
|
||||
}
|
||||
.nav-item { position: relative; }
|
||||
.nav-dot {
|
||||
width: 6px; height: 6px;
|
||||
border-radius: 50%;
|
||||
background: #57534E;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.nav-dot.active { background: var(--amber); }
|
||||
.nav-sub {
|
||||
padding-left: 12px;
|
||||
}
|
||||
|
||||
/* MAIN */
|
||||
.main {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 24px 32px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
/* BREADCRUMB + SEARCH ROW */
|
||||
.top-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 16px;
|
||||
}
|
||||
.breadcrumb {
|
||||
font-size: 13px;
|
||||
color: var(--text-muted);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
.breadcrumb span { color: var(--text); font-weight: 600; }
|
||||
.breadcrumb-sep { color: var(--text-faint); }
|
||||
|
||||
.search-box {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 7px 12px;
|
||||
width: 260px;
|
||||
}
|
||||
.search-box input {
|
||||
border: none;
|
||||
outline: none;
|
||||
font-family: 'Sora', sans-serif;
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
background: transparent;
|
||||
width: 100%;
|
||||
}
|
||||
.search-box input::placeholder { color: var(--text-faint); }
|
||||
.search-icon { color: var(--text-faint); font-size: 15px; }
|
||||
|
||||
/* STATS ROW */
|
||||
.stats-row {
|
||||
display: flex;
|
||||
gap: 24px;
|
||||
align-items: center;
|
||||
}
|
||||
.stat {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 1px;
|
||||
}
|
||||
.stat-value {
|
||||
font-size: 22px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
line-height: 1;
|
||||
}
|
||||
.stat-label {
|
||||
font-size: 11px;
|
||||
color: var(--text-muted);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
.stat-divider {
|
||||
width: 1px;
|
||||
height: 32px;
|
||||
background: var(--border);
|
||||
}
|
||||
.hidden-note {
|
||||
font-size: 12px;
|
||||
color: var(--text-faint);
|
||||
padding: 4px 10px;
|
||||
background: var(--bg);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 20px;
|
||||
margin-left: auto;
|
||||
cursor: pointer;
|
||||
}
|
||||
.hidden-note:hover { border-color: var(--amber); color: var(--amber); }
|
||||
|
||||
/* TIER GRID */
|
||||
.tier-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(4, 1fr);
|
||||
gap: 10px;
|
||||
}
|
||||
.tier-tile {
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 10px;
|
||||
padding: 14px 16px;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.15s, box-shadow 0.15s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
.tier-tile:hover {
|
||||
border-color: #D1C5BA;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.06);
|
||||
}
|
||||
.tier-tile.active {
|
||||
border-color: var(--amber);
|
||||
box-shadow: 0 0 0 1px var(--amber);
|
||||
}
|
||||
.tier-name {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
}
|
||||
.tier-count {
|
||||
font-size: 20px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
line-height: 1.1;
|
||||
}
|
||||
.tier-hidden {
|
||||
font-size: 11px;
|
||||
color: var(--text-faint);
|
||||
}
|
||||
.tier-tile.large .tier-count { color: var(--amber); }
|
||||
|
||||
/* EXPANDED TIER */
|
||||
.expanded-section {
|
||||
background: var(--surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.expanded-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 14px 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.expanded-title {
|
||||
font-size: 14px;
|
||||
font-weight: 700;
|
||||
color: var(--text);
|
||||
}
|
||||
.expanded-subtitle {
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
font-weight: 400;
|
||||
}
|
||||
.toggle-hidden {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
cursor: pointer;
|
||||
}
|
||||
.toggle-pill {
|
||||
width: 28px; height: 16px;
|
||||
background: var(--border);
|
||||
border-radius: 8px;
|
||||
position: relative;
|
||||
}
|
||||
.toggle-pill::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 2px; left: 2px;
|
||||
width: 12px; height: 12px;
|
||||
border-radius: 50%;
|
||||
background: white;
|
||||
box-shadow: 0 1px 2px rgba(0,0,0,0.2);
|
||||
}
|
||||
|
||||
/* VARIANTS TABLE */
|
||||
.variants-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.variants-table th {
|
||||
text-align: left;
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
color: var(--text-faint);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
padding: 10px 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.variants-table td {
|
||||
padding: 11px 20px;
|
||||
border-bottom: 1px solid #F0EDEA;
|
||||
vertical-align: middle;
|
||||
}
|
||||
.variants-table tr:last-child td { border-bottom: none; }
|
||||
.variants-table tr:hover td { background: #FAFAF9; }
|
||||
|
||||
.gene-name {
|
||||
font-weight: 600;
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
font-family: 'Sora', monospace;
|
||||
}
|
||||
.rsid {
|
||||
font-size: 11px;
|
||||
color: var(--text-faint);
|
||||
margin-top: 1px;
|
||||
}
|
||||
.finding-text {
|
||||
font-size: 13px;
|
||||
color: var(--text);
|
||||
}
|
||||
.genotype {
|
||||
font-family: 'Sora', monospace;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
background: #F4F1EE;
|
||||
padding: 2px 8px;
|
||||
border-radius: 4px;
|
||||
display: inline-block;
|
||||
}
|
||||
.sig-dot {
|
||||
width: 8px; height: 8px;
|
||||
border-radius: 50%;
|
||||
display: inline-block;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.sig-cell {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
white-space: nowrap;
|
||||
}
|
||||
.sig-dot.moderate { background: var(--amber); }
|
||||
.sig-dot.protective { background: var(--green); }
|
||||
.sig-dot.low { background: var(--text-faint); }
|
||||
.sig-dot.clear { background: #D4D0CB; }
|
||||
|
||||
.sig-label.moderate { color: var(--amber); }
|
||||
.sig-label.protective { color: var(--green); }
|
||||
.sig-label.low { color: var(--text-faint); }
|
||||
.sig-label.clear { color: var(--text-faint); }
|
||||
|
||||
/* EXPANDED FOOTER */
|
||||
.expanded-footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 12px 20px;
|
||||
border-top: 1px solid var(--border);
|
||||
background: #FAFAF9;
|
||||
}
|
||||
.footer-count {
|
||||
font-size: 12px;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
.load-more {
|
||||
font-size: 12px;
|
||||
color: var(--amber);
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
text-decoration: none;
|
||||
}
|
||||
.load-more:hover { text-decoration: underline; }
|
||||
|
||||
/* AI CTA */
|
||||
.ai-cta {
|
||||
background: var(--nav-bg);
|
||||
border-radius: 12px;
|
||||
padding: 16px 20px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
}
|
||||
.ai-cta-text {
|
||||
flex: 1;
|
||||
font-size: 13px;
|
||||
color: #A8A5A2;
|
||||
line-height: 1.5;
|
||||
}
|
||||
.ai-cta-text strong { color: #FFFFFF; font-weight: 600; }
|
||||
.ai-cta-btn {
|
||||
background: var(--amber);
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 8px;
|
||||
padding: 9px 18px;
|
||||
font-family: 'Sora', sans-serif;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.ai-cta-btn:hover { background: #9A4507; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<!-- TOP NAV -->
|
||||
<div class="topbar">
|
||||
<div class="topbar-logo">inou<span>.</span></div>
|
||||
<div class="topbar-patient">
|
||||
<div>
|
||||
<div class="topbar-patient-name">Jane Doe</div>
|
||||
<div class="topbar-patient-dob">DOB Jan 1 2017 · Female</div>
|
||||
</div>
|
||||
<div class="avatar">JD</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="layout">
|
||||
|
||||
<!-- SIDEBAR -->
|
||||
<nav class="sidebar">
|
||||
<div class="nav-section-label">Overview</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Dashboard</a>
|
||||
|
||||
<div class="nav-section-label">Tests</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Labs</a>
|
||||
<a class="nav-item active" href="#"><span class="nav-dot active"></span>Genetics</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Imaging</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Assessments</a>
|
||||
|
||||
<div class="nav-section-label">Body</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Vitals</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Exercise</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Nutrition</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Sleep</a>
|
||||
|
||||
<div class="nav-section-label">Treatment</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Medications</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Supplements</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Therapy</a>
|
||||
|
||||
<div class="nav-section-label">History</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Diagnoses</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Symptoms</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Family History</a>
|
||||
|
||||
<div class="nav-section-label">Care Team</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Consultations</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Providers</a>
|
||||
|
||||
<div class="nav-section-label">Files</div>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Documents</a>
|
||||
<a class="nav-item" href="#"><span class="nav-dot"></span>Uploads</a>
|
||||
</nav>
|
||||
|
||||
<!-- MAIN CONTENT -->
|
||||
<main class="main">
|
||||
|
||||
<!-- BREADCRUMB + SEARCH -->
|
||||
<div class="top-row">
|
||||
<div class="breadcrumb">
|
||||
Jane Doe <span class="breadcrumb-sep">›</span> Tests <span class="breadcrumb-sep">›</span> <span>Genetics</span>
|
||||
</div>
|
||||
<div class="search-box">
|
||||
<span class="search-icon">⌕</span>
|
||||
<input type="text" placeholder="Search gene or rsID…">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- STATS ROW -->
|
||||
<div class="stats-row">
|
||||
<div class="stat">
|
||||
<div class="stat-value">3,866</div>
|
||||
<div class="stat-label">Total variants</div>
|
||||
</div>
|
||||
<div class="stat-divider"></div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">12</div>
|
||||
<div class="stat-label">Categories</div>
|
||||
</div>
|
||||
<div class="stat-divider"></div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">597</div>
|
||||
<div class="stat-label">Hidden (no risk)</div>
|
||||
</div>
|
||||
<div class="hidden-note">Show hidden variants</div>
|
||||
</div>
|
||||
|
||||
<!-- TIER GRID -->
|
||||
<div class="tier-grid">
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Traits</div>
|
||||
<div class="tier-count">132</div>
|
||||
<div class="tier-hidden">49 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Longevity</div>
|
||||
<div class="tier-count">12</div>
|
||||
<div class="tier-hidden">1 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile active">
|
||||
<div class="tier-name">Metabolism</div>
|
||||
<div class="tier-count">97</div>
|
||||
<div class="tier-hidden">51 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Medications</div>
|
||||
<div class="tier-count">101</div>
|
||||
<div class="tier-hidden">26 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Mental Health</div>
|
||||
<div class="tier-count">63</div>
|
||||
<div class="tier-hidden">31 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Neurological</div>
|
||||
<div class="tier-count">91</div>
|
||||
<div class="tier-hidden">46 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Fertility</div>
|
||||
<div class="tier-count">12</div>
|
||||
<div class="tier-hidden">7 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Blood</div>
|
||||
<div class="tier-count">100</div>
|
||||
<div class="tier-hidden">12 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Cardiovascular</div>
|
||||
<div class="tier-count">104</div>
|
||||
<div class="tier-hidden">31 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile">
|
||||
<div class="tier-name">Autoimmune</div>
|
||||
<div class="tier-count">80</div>
|
||||
<div class="tier-hidden">43 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile large">
|
||||
<div class="tier-name">Disease</div>
|
||||
<div class="tier-count">2,272</div>
|
||||
<div class="tier-hidden">233 hidden</div>
|
||||
</div>
|
||||
<div class="tier-tile large">
|
||||
<div class="tier-name">Cancer</div>
|
||||
<div class="tier-count">998</div>
|
||||
<div class="tier-hidden">67 hidden</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- EXPANDED: METABOLISM -->
|
||||
<div class="expanded-section">
|
||||
<div class="expanded-header">
|
||||
<div>
|
||||
<div class="expanded-title">Metabolism <span style="font-weight:400; color: var(--text-muted)">· 97 variants</span></div>
|
||||
<div class="expanded-subtitle">Sorted by significance</div>
|
||||
</div>
|
||||
<div class="toggle-hidden">
|
||||
<span>Show hidden</span>
|
||||
<div class="toggle-pill"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<table class="variants-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width:130px">Gene</th>
|
||||
<th>Finding</th>
|
||||
<th style="width:90px">Genotype</th>
|
||||
<th style="width:120px">Significance</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><div class="gene-name">MTHFR</div><div class="rsid">rs1801133</div></td>
|
||||
<td><div class="finding-text">10–20% folate processing efficiency</div></td>
|
||||
<td><span class="genotype">AA</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP2C19</div><div class="rsid">rs4244285</div></td>
|
||||
<td><div class="finding-text">Poorer metabolizer of several medicines</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">PPARG</div><div class="rsid">rs1801282</div></td>
|
||||
<td><div class="finding-text">Higher cardiovascular risk with high fat diet</div></td>
|
||||
<td><span class="genotype">CG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">TCF7L2</div><div class="rsid">rs7903146</div></td>
|
||||
<td><div class="finding-text">Increased type 2 diabetes risk</div></td>
|
||||
<td><span class="genotype">CT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">FTO</div><div class="rsid">rs9939609</div></td>
|
||||
<td><div class="finding-text">1.67× increased obesity risk</div></td>
|
||||
<td><span class="genotype">AT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">SLCO1B1</div><div class="rsid">rs4149056</div></td>
|
||||
<td><div class="finding-text">Increased statin-induced myopathy risk</div></td>
|
||||
<td><span class="genotype">CT</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot moderate"></span><span class="sig-label moderate">Moderate</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">APOA2</div><div class="rsid">rs5082</div></td>
|
||||
<td><div class="finding-text">Associated with higher HDL cholesterol</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot protective"></span><span class="sig-label protective">Protective</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP1A2</div><div class="rsid">rs762551</div></td>
|
||||
<td><div class="finding-text">Slow caffeine metabolizer</div></td>
|
||||
<td><span class="genotype">AC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">CYP3A5</div><div class="rsid">rs776746</div></td>
|
||||
<td><div class="finding-text">Non-expressor — affects drug dosing</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">MCM6</div><div class="rsid">rs4988235</div></td>
|
||||
<td><div class="finding-text">Partial lactase persistence</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">APOE</div><div class="rsid">rs7412</div></td>
|
||||
<td><div class="finding-text">Normal lipid metabolism</div></td>
|
||||
<td><span class="genotype">CC</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot clear"></span><span class="sig-label clear">Clear</span></div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><div class="gene-name">GCK</div><div class="rsid">rs1799884</div></td>
|
||||
<td><div class="finding-text">Slightly reduced glucose sensing</div></td>
|
||||
<td><span class="genotype">AG</span></td>
|
||||
<td><div class="sig-cell"><span class="sig-dot low"></span><span class="sig-label low">Low</span></div></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div class="expanded-footer">
|
||||
<div class="footer-count">Showing 12 of 97 variants</div>
|
||||
<a class="load-more" href="#">Load more</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- AI CTA -->
|
||||
<div class="ai-cta">
|
||||
<div class="ai-cta-text">
|
||||
<strong>Your AI has access to all 3,866 variants</strong>, including hidden ones. Ask it to reason across your metabolism, medication sensitivities, and disease risk together.
|
||||
</div>
|
||||
<button class="ai-cta-btn">Ask Claude about your genetics →</button>
|
||||
</div>
|
||||
|
||||
</main>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
Loading…
Reference in New Issue