inou/api/api_image.go

247 lines
6.3 KiB
Go

package main
import (
"bytes"
"encoding/json"
"fmt"
"image"
"image/color"
"image/draw"
"image/png"
"net/http"
"os"
"strconv"
"strings"
"github.com/chai2010/webp"
xdraw "golang.org/x/image/draw"
"inou/lib"
)
// cropCoords holds the crop region from series metadata.
// (X1, Y1) is one corner and (X2, Y2) the opposite corner of the region,
// in pixel coordinates; consumers treat X2/Y2 as inclusive. Valid is true
// only when a well-formed region (X2 > X1 and Y2 > Y1) was read; the zero
// value means "no crop".
type cropCoords struct {
	X1, Y1, X2, Y2 int
	Valid          bool
}
// getCropFromSeries retrieves crop coordinates from series metadata.
// It returns the zero-value cropCoords (Valid == false) when seriesID is
// empty, the series cannot be loaded, its Data field is not valid JSON,
// or the stored region has no positive extent in either axis.
func getCropFromSeries(seriesID string) cropCoords {
	var none cropCoords
	if seriesID == "" {
		return none
	}
	// nil ctx - helper called after RBAC check
	series, err := lib.EntryGet(nil, seriesID)
	if err != nil {
		return none
	}
	var meta struct {
		CropX1 int `json:"crop_x1"`
		CropY1 int `json:"crop_y1"`
		CropX2 int `json:"crop_x2"`
		CropY2 int `json:"crop_y2"`
	}
	if json.Unmarshal([]byte(series.Data), &meta) != nil {
		return none
	}
	// A usable region needs strictly positive extent in both axes.
	if meta.CropX2 <= meta.CropX1 || meta.CropY2 <= meta.CropY1 {
		return none
	}
	return cropCoords{
		X1:    meta.CropX1,
		Y1:    meta.CropY1,
		X2:    meta.CropX2,
		Y2:    meta.CropY2,
		Valid: true,
	}
}
// handleImage serves a single imaging slice as a lossless WebP image.
//
// Path: /image/{id}, where {id} is a 16-character hex entry ID.
// Query parameters:
//
//	full=1   skip the series-level crop
//	wc, ww   override window center / window width
//	maxdim   maximum output dimension in pixels (default 2000)
//
// Response headers report the applied window width (X-Inou-Window), window
// center (X-Inou-Level) and, when a crop was applied, the crop region
// (X-Inou-Crop, "x1,y1,x2,y2" with inclusive corners).
func handleImage(w http.ResponseWriter, r *http.Request) {
	ctx := getAccessContextOrFail(w, r)
	if ctx == nil {
		return // helper already wrote the error response
	}

	// Extract hex ID from path: /image/{id}. A valid ID is exactly 16
	// characters, so this single length check also rejects the empty string.
	entryID := strings.TrimPrefix(r.URL.Path, "/image/")
	if len(entryID) != 16 {
		http.Error(w, "Invalid ID", http.StatusBadRequest)
		return
	}

	// Get slice info from DB (RBAC already checked by portal).
	entry, err := lib.EntryGet(ctx, entryID)
	if err != nil {
		http.Error(w, "Slice not found", http.StatusNotFound)
		return
	}
	if entry.Category != lib.CategoryImaging || entry.Type != "slice" {
		http.Error(w, "Slice not found", http.StatusNotFound)
		return
	}
	dossierID := entry.DossierID
	seriesID := entry.ParentID

	// Get crop coordinates from series (unless ?full=1).
	wantFull := r.URL.Query().Get("full") == "1"
	var crop cropCoords
	if !wantFull {
		crop = getCropFromSeries(seriesID)
	}

	// Parse slice metadata. Decoding is best-effort by design: malformed
	// JSON leaves the struct at its zero values, which the fallbacks below
	// handle, so the error is deliberately discarded.
	var data struct {
		WindowCenter     float64 `json:"window_center"`
		WindowWidth      float64 `json:"window_width"`
		PixelMin         int     `json:"pixel_min"`
		PixelMax         int     `json:"pixel_max"`
		RescaleSlope     float64 `json:"rescale_slope"`
		RescaleIntercept float64 `json:"rescale_intercept"`
	}
	_ = json.Unmarshal([]byte(entry.Data), &data)

	rescaleSlope := data.RescaleSlope
	if rescaleSlope == 0 {
		rescaleSlope = 1 // missing slope means identity rescale
	}
	center := data.WindowCenter
	width := data.WindowWidth
	// For CT (rescaleIntercept != 0), convert W/L from Hounsfield Units
	// back to raw pixel space.
	if data.RescaleIntercept != 0 {
		center = (center - data.RescaleIntercept) / rescaleSlope
		width = width / rescaleSlope
	}
	// Fall back to the stored pixel range when no W/L was provided.
	if center == 0 && width == 0 {
		center = float64(data.PixelMin+data.PixelMax) / 2
		width = float64(data.PixelMax - data.PixelMin)
		if width == 0 {
			width = 1 // avoid a degenerate zero-width window
		}
	}
	// Allow query param overrides.
	if wc := r.URL.Query().Get("wc"); wc != "" {
		if v, err := strconv.ParseFloat(wc, 64); err == nil {
			center = v
		}
	}
	if ww := r.URL.Query().Get("ww"); ww != "" {
		if v, err := strconv.ParseFloat(ww, 64); err == nil {
			width = v
		}
	}

	// Load and decrypt the stored 16-bit PNG.
	encryptedData, err := os.ReadFile(lib.ObjectPath(dossierID, entryID))
	if err != nil {
		http.Error(w, "Image file not found", http.StatusNotFound)
		return
	}
	decryptedData, err := lib.CryptoDecryptBytes(encryptedData)
	if err != nil {
		http.Error(w, "Failed to decrypt image", http.StatusInternalServerError)
		return
	}
	img, err := png.Decode(bytes.NewReader(decryptedData))
	if err != nil {
		http.Error(w, "Failed to decode image", http.StatusInternalServerError)
		return
	}

	var finalImg image.Image
	bounds := img.Bounds()
	switch src := img.(type) {
	case *image.Gray16:
		// 16-bit grayscale: apply window/level via a 64K-entry LUT.
		low := center - width/2
		high := center + width/2
		lut := make([]uint8, 65536)
		for i := 0; i < 65536; i++ {
			var v uint8
			switch {
			case float64(i) <= low:
				v = 0
			case float64(i) >= high:
				v = 255
			default:
				v = uint8((float64(i) - low) * 255 / width)
			}
			// Snap near-black values to pure black — presumably to suppress
			// background noise / aid compression; TODO confirm threshold 18.
			if v < 18 {
				v = 0
			}
			lut[i] = v
		}
		out := image.NewGray(bounds)
		for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
			for x := bounds.Min.X; x < bounds.Max.X; x++ {
				out.SetGray(x, y, color.Gray{Y: lut[src.Gray16At(x, y).Y]})
			}
		}
		finalImg = out
	case *image.RGBA, *image.NRGBA:
		// RGB/RGBA: pass through directly (DWI, color overlays, etc.)
		finalImg = src
	default:
		http.Error(w, fmt.Sprintf("Unsupported image format: %T", img), http.StatusInternalServerError)
		return
	}

	if crop.Valid {
		// Clamp crop to image bounds; crop coords are inclusive, hence +1.
		x1 := max(crop.X1, bounds.Min.X)
		y1 := max(crop.Y1, bounds.Min.Y)
		x2 := min(crop.X2+1, bounds.Max.X)
		y2 := min(crop.Y2+1, bounds.Max.Y)
		if x2 > x1 && y2 > y1 {
			cropRect := image.Rect(0, 0, x2-x1, y2-y1)
			cropped := image.NewRGBA(cropRect)
			draw.Draw(cropped, cropRect, finalImg, image.Pt(x1, y1), draw.Src)
			finalImg = cropped
		}
	}

	// Check for maxdim parameter (default 2000 for Claude API compatibility).
	maxDim := 2000
	if md := r.URL.Query().Get("maxdim"); md != "" {
		if v, err := strconv.Atoi(md); err == nil && v > 0 {
			maxDim = v
		}
	}
	// Downscale if either dimension exceeds maxDim, preserving aspect ratio.
	finalBounds := finalImg.Bounds()
	w0, h0 := finalBounds.Dx(), finalBounds.Dy()
	if w0 > maxDim || h0 > maxDim {
		scale := float64(maxDim) / float64(max(w0, h0))
		newW := int(float64(w0) * scale)
		newH := int(float64(h0) * scale)
		resized := image.NewRGBA(image.Rect(0, 0, newW, newH))
		xdraw.BiLinear.Scale(resized, resized.Bounds(), finalImg, finalBounds, xdraw.Over, nil)
		finalImg = resized
	}

	// Encode to a buffer first: once headers are written a failed encode can
	// no longer be reported, so the status must be decided up front. This
	// also lets us set Content-Length.
	var buf bytes.Buffer
	if err := webp.Encode(&buf, finalImg, &webp.Options{Lossless: true}); err != nil {
		http.Error(w, "Failed to encode image", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "image/webp")
	w.Header().Set("Cache-Control", "public, max-age=86400")
	w.Header().Set("X-Inou-Window", fmt.Sprintf("%.0f", width))
	w.Header().Set("X-Inou-Level", fmt.Sprintf("%.0f", center))
	if crop.Valid {
		w.Header().Set("X-Inou-Crop", fmt.Sprintf("%d,%d,%d,%d", crop.X1, crop.Y1, crop.X2, crop.Y2))
	}
	w.Header().Set("Content-Length", strconv.Itoa(buf.Len()))
	w.Write(buf.Bytes())
}