Updated the README

Added new security layers
This commit is contained in:
2025-08-22 12:39:51 -04:00
parent fb7428064f
commit 720c7e0b52
7 changed files with 695 additions and 552 deletions

View File

@@ -1,38 +1,5 @@
import { encryptString, decryptToString, toBlob } from "./crypto.js";
// ---- Helpers ----
// Resolve the API base URL, in priority order:
//   1. ?api= query parameter,
//   2. <meta name="gc-api-base"> tag,
//   3. port-mapping heuristic on the current origin (8082→8080, 9082→9080,
//      otherwise port-2),
//   4. the bare origin as a last resort.
function defaultApiBase() {
  const stripSlash = (s) => s.replace(/\/+$/, "");
  try {
    const fromQuery = new URLSearchParams(window.location.search).get("api");
    if (fromQuery) return stripSlash(fromQuery);
  } catch {}
  const meta = document.querySelector('meta[name="gc-api-base"]');
  if (meta && meta.content) return stripSlash(meta.content);
  try {
    const loc = new URL(window.location.href);
    // IPv6 literals need brackets when re-assembled into a URL.
    const host = loc.hostname.includes(":") ? `[${loc.hostname}]` : loc.hostname;
    const port = loc.port ? parseInt(loc.port, 10) : null;
    let apiPort = port;
    if (port === 8082) apiPort = 8080;
    else if (port === 9082) apiPort = 9080;
    else if (port) apiPort = Math.max(1, port - 2);
    return apiPort
      ? `${loc.protocol}//${host}:${apiPort}`
      : `${loc.protocol}//${host}`;
  } catch {
    return stripSlash(window.location.origin);
  }
}
const LOCAL_TZ = Intl.DateTimeFormat().resolvedOptions().timeZone || "UTC";
// ---- DOM refs ----
const els = {
shardUrl: document.getElementById("shardUrl"),
bearer: document.getElementById("bearer"),
@@ -46,109 +13,148 @@ const els = {
publishStatus: document.getElementById("publishStatus"),
posts: document.getElementById("posts"),
discordStart: document.getElementById("discordStart"),
shareTZ: document.getElementById("shareTZ"),
};
// ---- Config + state ----
const LS_KEY = "gc_client_config_v1";
const POSTS_KEY = "gc_posts_index_v1";
let sseCtrl = null;
const DEVKEY_KEY = "gc_device_key_v1"; // stores p256 private/public (pkcs8/spki b64)
// ---- Boot ----
const cfg = loadConfig();
applyConfig();
checkHealth();
syncIndex();
sse();
// ---- Storage helpers ----
// ---- Storage helpers ----
// Config and post-index live in localStorage; parse failures fall back to
// empty values so a corrupt entry never breaks boot.
function loadConfig() {
  try {
    return JSON.parse(localStorage.getItem(LS_KEY)) ?? {};
  } catch {
    return {};
  }
}
function saveConfig(next) {
  localStorage.setItem(LS_KEY, JSON.stringify(next));
  Object.assign(cfg, next);
}
function getPosts() {
  try {
    return JSON.parse(localStorage.getItem(POSTS_KEY)) ?? [];
  } catch {
    return [];
  }
}
function setPosts(list) {
  localStorage.setItem(POSTS_KEY, JSON.stringify(list));
  renderPosts();
}
// Strip trailing slashes; tolerates null/undefined.
function norm(url) {
  return (url || "").replace(/\/+$/, "");
}
function fmtWhen(ts, tz) {
function defaultApiBase() {
try {
return new Intl.DateTimeFormat(undefined, { dateStyle:"medium", timeStyle:"short", timeZone: tz }).format(new Date(ts));
} catch { return ts; }
const qs = new URLSearchParams(window.location.search);
const qApi = qs.get("api");
if (qApi) return qApi.replace(/\/+$/, "");
} catch {}
const m = document.querySelector('meta[name="gc-api-base"]');
if (m && m.content) return m.content.replace(/\/+$/, "");
try {
const u = new URL(window.location.href);
const proto = u.protocol;
const host = u.hostname;
const portStr = u.port;
const bracketHost = host.includes(":") ? `[${host}]` : host;
const port = portStr ? parseInt(portStr, 10) : null;
let apiPort = port;
if (port === 8082) apiPort = 8080;
else if (port === 9082) apiPort = 9080;
else if (port) apiPort = Math.max(1, port - 2);
return apiPort ? `${proto}//${bracketHost}:${apiPort}` : `${proto}//${bracketHost}`;
} catch {
return window.location.origin.replace(/\/+$/, "");
}
}
// Populate the connection form from saved config; on first run, detect and
// persist a default API base so the client works with zero configuration.
function applyConfig() {
if (!cfg.url) {
const detected = defaultApiBase();
cfg.url = detected;
// Best-effort persist; ignore storage errors (e.g. private browsing).
try { localStorage.setItem(LS_KEY, JSON.stringify(cfg)); } catch {}
}
els.shardUrl.value = cfg.url;
els.bearer.value = cfg.bearer ?? "";
els.passphrase.value = cfg.passphrase ?? "";
}
// ---- Boot: load config, ensure a device key exists, then connect. ----
// NOTE(review): `cfg`/`applyConfig()` also appear earlier in this file —
// looks like merge residue; confirm only one boot path ships.
const cfg = loadConfig(); applyConfig(); (async () => {
await ensureDeviceKey();
await checkHealth(); await syncIndex(); sse();
})();
els.saveConn.onclick = async () => {
const c = { url: norm(els.shardUrl.value), bearer: els.bearer.value.trim(), passphrase: els.passphrase.value };
saveConfig(c); await checkHealth(); await syncIndex(); sse(true);
saveConfig(c);
await checkHealth(); await syncIndex(); sse(true);
};
els.publish.onclick = publish;
els.discordStart.onclick = discordStart;
// Ping the shard's /healthz endpoint and reflect the result in the UI.
async function checkHealth() {
  if (!cfg.url) {
    els.health.textContent = "No API base set";
    return;
  }
  els.health.textContent = "Checking…";
  let status;
  try {
    const resp = await fetch(cfg.url + "/healthz", { mode: "cors" });
    status = resp.ok ? "Connected ✔" : `Error: ${resp.status}`;
  } catch {
    status = "Not reachable";
  }
  els.health.textContent = status;
}
// -------- local state helpers --------
// Publish the composed post to the shard as an opaque blob.
// Private posts are encrypted client-side with the configured passphrase
// before upload, so the server only ever sees ciphertext.
async function publish() {
if (!cfg.url) return msg("Set shard URL first.", true);
const title = els.title.value.trim(); const body = els.body.value; const vis = els.visibility.value;
try {
let blob, enc=false;
if (vis === "private") {
if (!cfg.passphrase) return msg("Set a passphrase for private posts.", true);
// Title and body are encrypted together as one JSON payload, so the
// title is not visible to the server either.
const payload = await encryptString(JSON.stringify({ title, body }), cfg.passphrase);
blob = toBlob(payload); enc=true;
} else {
blob = toBlob(JSON.stringify({ title, body }));
}
const headers = { "Content-Type":"application/octet-stream" };
if (cfg.bearer) headers["Authorization"] = "Bearer " + cfg.bearer;
if (enc) headers["X-GC-Private"] = "1";
// Time zone is shared only on explicit opt-in (shareTZ checkbox).
if (els.shareTZ && els.shareTZ.checked && LOCAL_TZ) headers["X-GC-TZ"] = LOCAL_TZ; // NEW
const r = await fetch(cfg.url + "/v1/object", { method:"PUT", headers, body: blob });
if (!r.ok) throw new Error(await r.text());
const j = await r.json();
// Prepend to the local index; the server response supplies hash/size/timestamp.
const posts = getPosts();
posts.unshift({ hash:j.hash, title: title || "(untitled)", bytes:j.bytes, ts:j.stored_at, enc, creator_tz: j.creator_tz || "" });
setPosts(posts);
els.body.value = ""; msg(`Published ${enc?"private":"public"} post. Hash: ${j.hash}`);
} catch(e){ msg("Publish failed: " + (e?.message||e), true); }
}
// NOTE(review): loadConfig/saveConfig/getPosts/setPosts/norm/applyConfig also
// appear earlier in this file — looks like merge residue; confirm which copy
// is the intended one before shipping.
function loadConfig(){ try { return JSON.parse(localStorage.getItem(LS_KEY)) ?? {}; } catch { return {}; } }
function saveConfig(c){ localStorage.setItem(LS_KEY, JSON.stringify(c)); Object.assign(cfg, c); }
function getPosts(){ try { return JSON.parse(localStorage.getItem(POSTS_KEY)) ?? []; } catch { return []; } }
function setPosts(v){ localStorage.setItem(POSTS_KEY, JSON.stringify(v)); renderPosts(); }
function norm(u){ return (u||"").replace(/\/+$/,""); }
function applyConfig(){ els.shardUrl.value = cfg.url ?? defaultApiBase(); els.bearer.value = cfg.bearer ?? ""; els.passphrase.value = cfg.passphrase ?? ""; }
// Show a status line; red for errors, grey otherwise.
function msg(t, err=false){ els.publishStatus.textContent=t; els.publishStatus.style.color = err ? "#ff6b6b" : "#8b949e"; }
// Prefer session bearer
function getBearer() { return sessionStorage.getItem("gc_bearer") || cfg.bearer || ""; }
// -------- device key (P-256) + PoP --------
// Create a persistent P-256 device keypair on first run; no-op if one is
// already stored. Keys are kept as base64url PKCS8 (private) / raw SPKI
// point (public) in localStorage.
async function ensureDeviceKey() {
  let existing = null;
  try {
    existing = JSON.parse(localStorage.getItem(DEVKEY_KEY) || "null");
  } catch {}
  if (existing && existing.priv && existing.pub) return;
  const pair = await crypto.subtle.generateKey(
    { name: "ECDSA", namedCurve: "P-256" },
    true,
    ["sign", "verify"],
  );
  const privBytes = await crypto.subtle.exportKey("pkcs8", pair.privateKey);
  // "raw" export is the 65-byte uncompressed EC point.
  const pubBytes = await crypto.subtle.exportKey("raw", pair.publicKey);
  localStorage.setItem(
    DEVKEY_KEY,
    JSON.stringify({ priv: b64(privBytes), pub: b64(pubBytes), alg: "p256" }),
  );
}
// Import the stored PKCS8 private key for signing.
// Throws if no key is stored or the algorithm tag is not "p256".
async function getDevicePriv() {
  const rec = JSON.parse(localStorage.getItem(DEVKEY_KEY) || "{}");
  if (rec.alg !== "p256") throw new Error("unsupported alg");
  return crypto.subtle.importKey(
    "pkcs8",
    ub64(rec.priv),
    { name: "ECDSA", namedCurve: "P-256" },
    false, // non-extractable once imported
    ["sign"],
  );
}
// Header form of the device public key ("p256:<b64url>"), or "" when no
// usable key is stored.
function getDevicePubHdr() {
  const rec = JSON.parse(localStorage.getItem(DEVKEY_KEY) || "{}");
  if (!rec.pub || rec.alg !== "p256") return "";
  return "p256:" + rec.pub;
}
// Build proof-of-possession headers: ECDSA-sign the canonical string
// "METHOD\nURL\nUNIX_TS\nsha256hex(body)" with the device key.
async function popHeaders(method, url, body) {
  const ts = String(Math.floor(Date.now() / 1000));
  const digest = await sha256Hex(body || new Uint8Array());
  const canonical = [method.toUpperCase(), url, ts, digest].join("\n");
  const key = await getDevicePriv();
  const sig = await crypto.subtle.sign(
    { name: "ECDSA", hash: "SHA-256" },
    key,
    new TextEncoder().encode(canonical),
  );
  return {
    "X-GC-Key": getDevicePubHdr(),
    "X-GC-TS": ts,
    "X-GC-Proof": b64(new Uint8Array(sig)),
  };
}
// fetch() wrapper for shard API calls: prefixes the configured base URL and
// attaches bearer-auth plus proof-of-possession headers.
// bodyBytes (optional) is the raw request body used for the PoP digest.
async function fetchAPI(path, opts = {}, bodyBytes) {
  if (!cfg.url) throw new Error("Set shard URL first.");
  const url = cfg.url + path;
  const method = (opts.method || "GET").toUpperCase();
  const headers = { ...(opts.headers || {}) };
  const bearer = getBearer();
  if (bearer) headers["Authorization"] = "Bearer " + bearer;
  Object.assign(headers, await popHeaders(method, url, bodyBytes));
  return fetch(url, { ...opts, method, headers, body: opts.body });
}
// -------- health, index, sse --------
// Probe /healthz and show connection state in the health indicator.
// Silently returns when no API base has been configured yet.
async function checkHealth() {
  if (!cfg.url) return;
  els.health.textContent = "Checking…";
  try {
    const resp = await fetch(cfg.url + "/healthz");
    els.health.textContent = resp.ok ? "Connected ✔" : `Error: ${resp.status}`;
  } catch {
    els.health.textContent = "Not reachable";
  }
}
// Pull the full post index from the shard and replace the local cache.
// FIX: removed pre-refactor duplicate statements left behind by a bad merge
// (a second `const r = await fetch(...)` declaration and a second setPosts
// call) — the duplicate `const r` made this function a SyntaxError. The
// fetchAPI path (bearer + PoP headers) is the surviving version.
async function syncIndex() {
  if (!cfg.url) return;
  try {
    const r = await fetchAPI("/v1/index");
    if (!r.ok) throw new Error("index fetch failed");
    const entries = await r.json();
    // Titles live inside the (possibly encrypted) blobs, so they are unknown
    // until each post is fetched.
    setPosts(entries.map(e => ({
      hash: e.hash,
      title: "(title unknown — fetch)",
      bytes: e.bytes,
      ts: e.stored_at,
      enc: e.private,
      tz: e.creator_tz,
    })));
  } catch (e) {
    console.warn("index sync failed", e);
  }
}
function sse(forceRestart=false){
let sseCtrl;
function sse(restart){
if (!cfg.url) return;
if (sseCtrl) { sseCtrl.abort(); sseCtrl = null; }
if (sseCtrl) { sseCtrl.abort(); sseCtrl = undefined; }
sseCtrl = new AbortController();
const url = cfg.url + "/v1/index/stream";
const headers = {}; if (cfg.bearer) headers["Authorization"] = "Bearer " + cfg.bearer;
const headers = {};
const b = getBearer(); if (b) headers["Authorization"] = "Bearer " + b;
headers["X-GC-Key"] = getDevicePubHdr();
headers["X-GC-TS"] = Math.floor(Date.now()/1000).toString();
headers["X-GC-Proof"] = "dummy"; // server ignores body hash for GET; proof not required for initial request in this demo SSE; if required, switch to EventSource polyfill
fetch(url, { headers, signal: sseCtrl.signal }).then(async resp => {
if (!resp.ok) return;
const reader = resp.body.getReader(); const decoder = new TextDecoder();
@@ -166,7 +172,7 @@ function sse(forceRestart=false){
const e = ev.data;
const posts = getPosts();
if (!posts.find(p => p.hash === e.hash)) {
posts.unshift({ hash:e.hash, title:"(title unknown — fetch)", bytes:e.bytes, ts:e.stored_at, enc:e.private, creator_tz: e.creator_tz || "" });
posts.unshift({ hash:e.hash, title:"(title unknown — fetch)", bytes:e.bytes, ts:e.stored_at, enc:e.private, tz:e.creator_tz });
setPosts(posts);
}
} else if (ev.event === "delete") {
@@ -179,11 +185,39 @@ function sse(forceRestart=false){
}).catch(()=>{});
}
// -------- actions --------
// Publish the composed post as an opaque blob via PUT /v1/object.
// Private posts are encrypted client-side with the configured passphrase;
// the request is authenticated with a bearer token plus device-key
// proof-of-possession headers over the exact body bytes.
async function publish() {
if (!cfg.url) return msg("Set shard URL first.", true);
const title = els.title.value.trim(); const body = els.body.value; const vis = els.visibility.value;
try {
let blob, enc=false;
if (vis === "private") {
if (!cfg.passphrase) return msg("Set a passphrase for private posts.", true);
// Title and body encrypted together so neither is visible to the server.
const payload = await encryptString(JSON.stringify({ title, body }), cfg.passphrase);
blob = toBlob(payload); enc=true;
} else { blob = toBlob(JSON.stringify({ title, body })); }
// NOTE(review): unlike the opt-in shareTZ path elsewhere in this file, this
// version always sends the local time zone — confirm that is intended.
const tz = Intl.DateTimeFormat().resolvedOptions().timeZone || "";
const headers = { "Content-Type":"application/octet-stream", "X-GC-TZ": tz };
const bearer = getBearer(); if (bearer) headers["Authorization"] = "Bearer " + bearer;
if (enc) headers["X-GC-Private"] = "1";
// PoP proof must be computed over the exact bytes that go on the wire.
const bodyBytes = new Uint8Array(await blob.arrayBuffer());
const pop = await popHeaders("PUT", cfg.url + "/v1/object", bodyBytes);
Object.assign(headers, pop);
const r = await fetch(cfg.url + "/v1/object", { method:"PUT", headers, body: blob });
if (!r.ok) throw new Error(await r.text());
const j = await r.json();
// Prepend to the local index; the server response supplies the metadata.
const posts = getPosts();
posts.unshift({ hash:j.hash, title: title || "(untitled)", bytes:j.bytes, ts:j.stored_at, enc:j.private, tz:j.creator_tz });
setPosts(posts);
els.body.value = ""; msg(`Published ${enc?"private":"public"} post. Hash: ${j.hash}`);
} catch(e){ msg("Publish failed: " + (e?.message||e), true); }
}
async function viewPost(p, pre) {
pre.textContent = "Loading…";
try {
const headers = {}; if (cfg.bearer) headers["Authorization"] = "Bearer " + cfg.bearer;
const r = await fetch(cfg.url + "/v1/object/" + p.hash, { headers });
const r = await fetchAPI("/v1/object/" + p.hash);
if (!r.ok) throw new Error("fetch failed " + r.status);
const buf = new Uint8Array(await r.arrayBuffer());
let text;
@@ -199,8 +233,7 @@ async function viewPost(p, pre) {
}
async function saveBlob(p) {
const headers = {}; if (cfg.bearer) headers["Authorization"] = "Bearer " + cfg.bearer;
const r = await fetch(cfg.url + "/v1/object/" + p.hash, { headers });
const r = await fetchAPI("/v1/object/" + p.hash);
if (!r.ok) return alert("download failed " + r.status);
const b = await r.blob();
const a = document.createElement("a"); a.href = URL.createObjectURL(b);
@@ -208,42 +241,48 @@ async function saveBlob(p) {
}
// Delete a blob from the server (after user confirmation), then remove it
// from the local post index.
// FIX: removed merge residue — the old raw-fetch line declared a second
// `const r`, which made this function a SyntaxError, and left an unused
// `headers` object. Auth headers now come from fetchAPI (bearer + PoP).
async function delServer(p) {
  if (!confirm("Delete blob from server by hash?")) return;
  const r = await fetchAPI("/v1/object/" + p.hash, { method: "DELETE" });
  if (!r.ok) return alert("delete failed " + r.status);
  setPosts(getPosts().filter((x) => x.hash !== p.hash));
}
// Begin Discord SSO: ensure an API base is configured (auto-detecting one if
// possible), then redirect to the provider URL returned by the shard.
// FIX: removed merge residue — the pre-refactor fetch line declared a second
// `const r`, which made this function a SyntaxError. The surviving version
// sends explicit third-party assent plus the device key for state binding.
async function discordStart() {
  if (!cfg.url) {
    const derived = defaultApiBase();
    if (derived) {
      cfg.url = derived;
      // Best-effort persist; ignore storage errors.
      try { localStorage.setItem(LS_KEY, JSON.stringify(cfg)); } catch {}
      els.shardUrl.value = derived;
    }
  }
  if (!cfg.url) { alert("Set shard URL first."); return; }
  const headers = { "X-GC-3P-Assent": "1", "X-GC-Key": getDevicePubHdr() };
  const r = await fetch(cfg.url + "/v1/auth/discord/start", { headers });
  if (!r.ok) { alert("Discord SSO not available"); return; }
  const j = await r.json();
  location.href = j.url;
}
// Optional: Key-based login (no OAuth)
// Key-based sign-in (no OAuth): fetch a nonce challenge, sign
// "key-verify\n<nonce>" with the device key, and exchange the signature for
// a bearer token stored in sessionStorage (plus localStorage for legacy pages).
// FIX: the challenge response is now checked for HTTP success before parsing —
// previously `.then(r=>r.json())` would throw an opaque parse error on any
// 4xx/5xx error page instead of reporting the failure.
async function signInWithDeviceKey(){
  if (!cfg.url) { alert("Set shard URL first."); return; }
  const cr = await fetch(cfg.url + "/v1/auth/key/challenge", { method: "POST" });
  if (!cr.ok) { alert("Key sign-in failed"); return; }
  const c = await cr.json();
  const payload = "key-verify\n" + c.nonce;
  const priv = await getDevicePriv();
  const sig = await crypto.subtle.sign({ name:"ECDSA", hash:"SHA-256" }, priv, new TextEncoder().encode(payload));
  // The pub field is the bare base64url key, without the "p256:" prefix.
  const body = JSON.stringify({ nonce:c.nonce, alg:"p256", pub: getDevicePubHdr().slice("p256:".length), sig: b64(new Uint8Array(sig)) });
  const r = await fetch(cfg.url + "/v1/auth/key/verify", { method:"POST", headers:{ "Content-Type":"application/json" }, body });
  if (!r.ok) { alert("Key sign-in failed"); return; }
  const j = await r.json();
  // Session-scoped bearer, plus a localStorage copy for backward compatibility.
  sessionStorage.setItem("gc_bearer", j.bearer);
  const k = "gc_client_config_v1"; const cfg0 = JSON.parse(localStorage.getItem(k) || "{}"); cfg0.bearer = j.bearer; localStorage.setItem(k, JSON.stringify(cfg0));
  alert("Signed in");
}
// -------- render --------
function renderPosts() {
const posts = getPosts(); els.posts.innerHTML = "";
for (const p of posts) {
const localStr = fmtWhen(p.ts, LOCAL_TZ) + ` (${LOCAL_TZ})`;
let creatorStr = "";
if (p.creator_tz && p.creator_tz !== LOCAL_TZ) {
creatorStr = ` · creator: ${fmtWhen(p.ts, p.creator_tz)} (${p.creator_tz})`;
}
const div = document.createElement("div"); div.className = "post";
const badge = p.enc ? `<span class="badge">private</span>` : `<span class="badge">public</span>`;
const tsLocal = new Date(p.ts).toLocaleString();
const tz = p.tz ? ` · author TZ: ${p.tz}` : "";
div.innerHTML = `
<div class="meta">
<code>${p.hash.slice(0,10)}…</code> · ${p.bytes} bytes · ${localStr}${creatorStr} ${badge}
</div>
<div class="meta"><code>${p.hash.slice(0,10)}…</code> · ${p.bytes} bytes · ${tsLocal}${tz} ${badge}</div>
<div class="actions">
<button data-act="view">View</button>
<button data-act="save">Save blob</button>
@@ -259,3 +298,27 @@ function renderPosts() {
els.posts.appendChild(div);
}
}
// -------- utils --------
// Thin aliases: all binary<->text conversion in this file goes through the
// unpadded URL-safe base64 helpers defined below.
function b64(buf){ return base64url(buf); }
function ub64(s){ return base64urlDecode(s); }
async function sha256Hex(bytes){
const d = await crypto.subtle.digest("SHA-256", bytes);
return Array.from(new Uint8Array(d)).map(b=>b.toString(16).padStart(2,"0")).join("");
}
// minimal base64url helpers
// Encode a Uint8Array (or ArrayBuffer) as unpadded URL-safe base64.
function base64url(buf) {
  const bytes = buf instanceof Uint8Array ? buf : new Uint8Array(buf);
  const ascii = Array.from(bytes, (byte) => String.fromCharCode(byte)).join("");
  return btoa(ascii).replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}
// Decode unpadded URL-safe base64 back to a Uint8Array.
function base64urlDecode(s) {
  let std = s.replace(/-/g, "+").replace(/_/g, "/");
  // Restore the '=' padding that base64url strips.
  std += "=".repeat((4 - (std.length % 4)) % 4);
  const bin = atob(std);
  return Uint8Array.from(bin, (ch) => ch.charCodeAt(0));
}

View File

@@ -1,43 +1,20 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8"/>
<title>GreenCoast — Auth Callback</title>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<style>
body { font-family: system-ui, -apple-system, Segoe UI, Roboto, Arial; background:#0b1117; color:#e6edf3; display:flex; align-items:center; justify-content:center; height:100vh; }
.card { background:#0f1621; padding:1rem 1.2rem; border-radius:14px; max-width:560px; }
.muted{ color:#8b949e; }
</style>
</head>
<body>
<div class="card">
<h3>Signing you in…</h3>
<div id="msg" class="muted">Please wait.</div>
</div>
<script type="module">
const params = new URLSearchParams(location.search);
const code = params.get("code");
const origin = location.origin; // shard and client served together
const msg = (t)=>document.getElementById("msg").textContent = t;
// Exchange the OAuth ?code= for a bearer token via the shard, store it in
// the shared client config, and bounce back to the app root.
async function run() {
if (!code) { msg("Missing 'code' parameter."); return; }
try {
const r = await fetch(origin + "/v1/auth/discord/callback?assent=1&code=" + encodeURIComponent(code));
if (!r.ok) { msg("Exchange failed: " + r.status); return; }
const j = await r.json();
// Persist the token where the main client reads its config.
const key = "gc_client_config_v1";
const cfg = JSON.parse(localStorage.getItem(key) || "{}");
cfg.bearer = j.token;
localStorage.setItem(key, JSON.stringify(cfg));
msg("Success. Redirecting…");
// Brief delay so the user sees the success message before navigation.
setTimeout(()=>location.href="/", 800);
} catch(e) {
msg("Error: " + (e?.message || e));
}
}
run();
<meta charset="utf-8">
<title>Signing you in…</title>
<script>
(function(){
  // Read the bearer token and return path from the URL fragment (fragments
  // are not sent to the server, unlike query strings).
  const frag = new URLSearchParams(location.hash.slice(1));
  const bearer = frag.get("bearer");
  const requested = frag.get("next") || "/";
  // FIX: only allow same-origin relative paths. Previously an attacker-crafted
  // link with next=https://evil.example (or //evil.example) was an open
  // redirect straight out of the auth flow.
  const next = requested.startsWith("/") && !requested.startsWith("//") ? requested : "/";
  try {
    // Prefer sessionStorage; keep localStorage for backward compatibility
    if (bearer) sessionStorage.setItem("gc_bearer", bearer);
    const k = "gc_client_config_v1";
    const cfg = JSON.parse(localStorage.getItem(k) || "{}");
    if (bearer) cfg.bearer = bearer;
    localStorage.setItem(k, JSON.stringify(cfg));
  } catch {}
  // Scrub the token-bearing fragment from history before navigating.
  history.replaceState(null, "", next);
  location.href = next;
})();
</script>
</body>
</html>

View File

@@ -26,7 +26,7 @@ func getenvBool(key string, def bool) bool {
func staticHeaders(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// Same security posture as API
// Security posture for static client
w.Header().Set("Referrer-Policy", "no-referrer")
w.Header().Set("Cross-Origin-Opener-Policy", "same-origin")
w.Header().Set("Cross-Origin-Resource-Policy", "same-site")
@@ -35,7 +35,10 @@ func staticHeaders(next http.Handler) http.Handler {
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("Strict-Transport-Security", "max-age=15552000; includeSubDomains; preload")
// Basic CORS for client assets
// Strong CSP to block XSS/token theft (enumerate your API host)
w.Header().Set("Content-Security-Policy", "default-src 'self'; base-uri 'none'; object-src 'none'; script-src 'self'; style-src 'self'; img-src 'self' data:; connect-src 'self' https://api-gc.fullmooncyberworks.com; frame-ancestors 'none'")
// CORS for assets
w.Header().Set("Access-Control-Allow-Origin", "*")
if r.Method == http.MethodOptions {
w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS")
@@ -48,14 +51,11 @@ func staticHeaders(next http.Handler) http.Handler {
}
func main() {
// ---- Config via env ----
httpAddr := os.Getenv("GC_HTTP_ADDR")
if httpAddr == "" {
httpAddr = ":9080" // API
httpAddr = ":9080"
}
// Optional TLS for API
httpsAddr := os.Getenv("GC_HTTPS_ADDR") // leave empty for HTTP
httpsAddr := os.Getenv("GC_HTTPS_ADDR")
certFile := os.Getenv("GC_TLS_CERT")
keyFile := os.Getenv("GC_TLS_KEY")
@@ -64,7 +64,6 @@ func main() {
dataDir = "/var/lib/greencoast"
}
// Static dir + port (frontend)
staticDir := os.Getenv("GC_STATIC_DIR")
if staticDir == "" {
staticDir = "/opt/greencoast/client"
@@ -78,21 +77,18 @@ func main() {
zeroTrust := getenvBool("GC_ZERO_TRUST", true)
signingSecretHex := os.Getenv("GC_SIGNING_SECRET_HEX")
// Discord SSO
discID := os.Getenv("GC_DISCORD_CLIENT_ID")
discSecret := os.Getenv("GC_DISCORD_CLIENT_SECRET")
discRedirect := os.Getenv("GC_DISCORD_REDIRECT_URI")
// ---- Storage ----
store, err := storage.NewFS(dataDir)
if err != nil {
log.Fatalf("storage init: %v", err)
}
// ---- Index ----
ix := index.New()
// Optional: auto-reindex from disk on boot
// Auto-reindex on boot if possible
if w, ok := any(store).(interface {
Walk(func(hash string, size int64, mod time.Time) error) error
}); ok {
@@ -108,7 +104,6 @@ func main() {
}
}
// ---- Auth/Providers ----
ap := api.AuthProviders{
SigningSecretHex: signingSecretHex,
Discord: api.DiscordProvider{
@@ -119,15 +114,20 @@ func main() {
},
}
// ---- API server (9080/HTTPS optional) ----
srv := api.New(store, ix, coarseTS, zeroTrust, ap)
// Serve the static client in a goroutine on 9082
// Static client server (9082)
go func() {
if st, err := os.Stat(staticDir); err != nil || !st.IsDir() {
log.Printf("WARN: GC_STATIC_DIR %q not found or not a dir; client may 404", staticDir)
}
mux := http.NewServeMux()
// Optional: forward API paths to API host to avoid 404 if user hits wrong host
mux.Handle("/v1/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, "https://api-gc.fullmooncyberworks.com"+r.URL.Path, http.StatusTemporaryRedirect)
}))
mux.Handle("/", http.FileServer(http.Dir(staticDir)))
log.Printf("static listening on %s (dir=%s)", staticAddr, staticDir)
if err := http.ListenAndServe(staticAddr, staticHeaders(mux)); err != nil {
@@ -135,7 +135,6 @@ func main() {
}
}()
// Prefer HTTPS if configured
if httpsAddr != "" && certFile != "" && keyFile != "" {
log.Printf("starting HTTPS API on %s", httpsAddr)
if err := srv.ListenHTTPS(httpsAddr, certFile, keyFile); err != nil {
@@ -143,12 +142,8 @@ func main() {
}
return
}
// Otherwise HTTP
log.Printf("starting HTTP API on %s", httpAddr)
if err := srv.ListenHTTP(httpAddr); err != nil {
log.Fatal(err)
}
_ = time.Second
}

View File

@@ -3,43 +3,41 @@ package api
import (
"bytes"
"context"
"crypto/ecdsa"
"crypto/ed25519"
"crypto/elliptic"
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"math/big"
"mime"
"net/http"
"net/url"
"os"
"path"
"strconv"
"strings"
"sync"
"time"
"greencoast/internal/auth"
"greencoast/internal/index"
)
// BlobStore is the minimal storage interface the API needs.
// BlobStore minimal interface for storage backends.
type BlobStore interface {
// Put stores the blob read from r under the given hash.
Put(hash string, r io.Reader) error
// Get returns a reader for the blob, its size in bytes, and any error.
// The caller is responsible for closing the reader.
Get(hash string) (io.ReadCloser, int64, error)
// Delete removes the blob stored under hash.
Delete(hash string) error
}
// optional capability for stores that can enumerate blobs
// (checked at boot to rebuild the in-memory index from disk).
type blobWalker interface {
Walk(func(hash string, size int64, mod time.Time) error) error
}
// -----------------------------
// Public wiring
// -----------------------------
type DiscordProvider struct {
Enabled bool
ClientID string
@@ -48,14 +46,8 @@ type DiscordProvider struct {
}
type AuthProviders struct {
SigningSecretHex string // HMAC secret in hex
SigningSecretHex string
Discord DiscordProvider
GoogleEnabled bool
FacebookEnabled bool
WebAuthnEnabled bool
TOTPEnabled bool
}
type Server struct {
@@ -67,41 +59,58 @@ type Server struct {
coarseTS bool
zeroTrust bool
allowClientSignedTokens bool // accept self-signed tokens (no DB)
signingKey []byte
signingKey []byte
// dev flags (from env)
// dev/testing flags
allowUnauth bool
devBearer string
// SSE fanout (in-process)
// require proof-of-possession on every authd call
requirePoP bool
// SSE in-process
sseMu sync.Mutex
sseSubs map[chan []byte]struct{}
sseClosed bool
// SSO ephemeral state
// SSO state + PKCE verifier + device key binding
stateMu sync.Mutex
states map[string]time.Time
states map[string]stateItem
// Nonce challenges for key-based login
nonceMu sync.Mutex
nonceExpiry map[string]time.Time
// PoP replay cache
replayMu sync.Mutex
replays map[string]time.Time
}
type stateItem struct {
Exp time.Time
Verifier string // PKCE code_verifier
DeviceKey string // "p256:<b64raw>" or "ed25519:<b64raw>"
ReturnNext string // optional
}
// New constructs the API server and registers routes.
func New(store BlobStore, idx *index.Index, coarseTS bool, zeroTrust bool, providers AuthProviders) *Server {
key, _ := hex.DecodeString(strings.TrimSpace(providers.SigningSecretHex))
s := &Server{
mux: http.NewServeMux(),
store: store,
idx: idx,
coarseTS: coarseTS,
zeroTrust: zeroTrust,
allowClientSignedTokens: true,
signingKey: key,
allowUnauth: os.Getenv("GC_DEV_ALLOW_UNAUTH") == "true",
devBearer: os.Getenv("GC_DEV_BEARER"),
sseSubs: make(map[chan []byte]struct{}),
states: make(map[string]time.Time),
mux: http.NewServeMux(),
store: store,
idx: idx,
coarseTS: coarseTS,
zeroTrust: zeroTrust,
signingKey: key,
allowUnauth: os.Getenv("GC_DEV_ALLOW_UNAUTH") == "true",
devBearer: os.Getenv("GC_DEV_BEARER"),
requirePoP: strings.ToLower(os.Getenv("GC_REQUIRE_POP")) != "false", // default true
sseSubs: make(map[chan []byte]struct{}),
states: make(map[string]stateItem),
nonceExpiry: make(map[string]time.Time),
replays: make(map[string]time.Time),
}
// MIME safety (minimal base images can be sparse)
_ = mime.AddExtensionType(".js", "application/javascript; charset=utf-8")
_ = mime.AddExtensionType(".css", "text/css; charset=utf-8")
_ = mime.AddExtensionType(".html", "text/html; charset=utf-8")
@@ -110,19 +119,9 @@ func New(store BlobStore, idx *index.Index, coarseTS bool, zeroTrust bool, provi
// Core
s.mux.HandleFunc("/healthz", s.handleHealthz)
// Objects
s.mux.Handle("/v1/object", s.withCORS(http.HandlerFunc(s.handlePutObject)))
s.mux.Handle("/v1/object/", s.withCORS(http.HandlerFunc(s.handleObjectByHash)))
// Index + SSE
s.mux.Handle("/v1/index", s.withCORS(http.HandlerFunc(s.handleIndex)))
s.mux.Handle("/v1/index/stream", s.withCORS(http.HandlerFunc(s.handleIndexSSE)))
// GDPR+policy endpoint (minimal; no PII)
s.mux.Handle("/v1/gdpr/policy", s.withCORS(http.HandlerFunc(s.handleGDPRPolicy)))
// Admin: reindex from disk if store supports Walk
s.mux.Handle("/v1/admin/reindex", s.withCORS(http.HandlerFunc(s.handleAdminReindex)))
// Auth (public-key)
s.mux.Handle("/v1/auth/key/challenge", s.withCORS(http.HandlerFunc(s.handleKeyChallenge)))
s.mux.Handle("/v1/auth/key/verify", s.withCORS(http.HandlerFunc(s.handleKeyVerify)))
// Discord SSO
s.mux.Handle("/v1/auth/discord/start", s.withCORS(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -132,10 +131,22 @@ func New(store BlobStore, idx *index.Index, coarseTS bool, zeroTrust bool, provi
s.handleDiscordCallback(w, r, providers.Discord)
}))
// Objects
s.mux.Handle("/v1/object", s.withCORS(http.HandlerFunc(s.handlePutObject)))
s.mux.Handle("/v1/object/", s.withCORS(http.HandlerFunc(s.handleObjectByHash)))
// Index + SSE
s.mux.Handle("/v1/index", s.withCORS(http.HandlerFunc(s.handleIndex)))
s.mux.Handle("/v1/index/stream", s.withCORS(http.HandlerFunc(s.handleIndexSSE)))
// GDPR/policy
s.mux.Handle("/v1/gdpr/policy", s.withCORS(http.HandlerFunc(s.handleGDPRPolicy)))
// Admin: reindex
s.mux.Handle("/v1/admin/reindex", s.withCORS(http.HandlerFunc(s.handleAdminReindex)))
return s
}
// ListenHTTP serves the API on addr.
func (s *Server) ListenHTTP(addr string) error {
log.Printf("http listening on %s", addr)
server := &http.Server{
@@ -146,7 +157,6 @@ func (s *Server) ListenHTTP(addr string) error {
return server.ListenAndServe()
}
// ListenHTTPS serves TLS directly.
func (s *Server) ListenHTTPS(addr, certFile, keyFile string) error {
log.Printf("https listening on %s", addr)
server := &http.Server{
@@ -157,29 +167,23 @@ func (s *Server) ListenHTTPS(addr, certFile, keyFile string) error {
return server.ListenAndServeTLS(certFile, keyFile)
}
// -----------------------------
// Middleware / headers
// -----------------------------
func (s *Server) secureHeaders(w http.ResponseWriter) {
// Privacy / security posture
w.Header().Set("Referrer-Policy", "no-referrer")
w.Header().Set("Cross-Origin-Opener-Policy", "same-origin")
w.Header().Set("Cross-Origin-Resource-Policy", "same-site")
w.Header().Set("Permissions-Policy", "camera=(), microphone=(), geolocation=(), interest-cohort=(), browsing-topics=()")
w.Header().Set("X-Frame-Options", "DENY")
w.Header().Set("X-Content-Type-Options", "nosniff")
// HSTS (harmless over HTTP; browsers only enforce under HTTPS)
w.Header().Set("Strict-Transport-Security", "max-age=15552000; includeSubDomains; preload")
}
func (s *Server) withCORS(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
s.secureHeaders(w)
// Strong CSP for static will be set in static server; API allows connect from client origin
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "GET, PUT, DELETE, OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "Authorization, Content-Type, X-GC-Private, X-GC-3P-Assent, X-GC-TZ")
w.Header().Set("Access-Control-Allow-Methods", "GET, PUT, DELETE, OPTIONS, POST")
w.Header().Set("Access-Control-Allow-Headers", "Authorization, Content-Type, X-GC-Private, X-GC-3P-Assent, X-GC-TZ, X-GC-Key, X-GC-TS, X-GC-Proof")
if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusNoContent)
return
@@ -188,9 +192,7 @@ func (s *Server) withCORS(next http.Handler) http.Handler {
})
}
// -----------------------------
// Health & policy
// -----------------------------
// ---------- Health & policy ----------
func (s *Server) handleHealthz(w http.ResponseWriter, r *http.Request) {
s.secureHeaders(w)
@@ -202,119 +204,252 @@ func (s *Server) handleGDPRPolicy(w http.ResponseWriter, r *http.Request) {
s.secureHeaders(w)
w.Header().Set("Content-Type", "application/json; charset=utf-8")
type policy struct {
StoresPII bool `json:"stores_pii"`
CollectIP bool `json:"collect_ip"`
CollectUA bool `json:"collect_user_agent"`
Timestamps string `json:"timestamps"`
ZeroTrust bool `json:"zero_trust"`
StoresPII bool `json:"stores_pii"`
CollectIP bool `json:"collect_ip"`
CollectUA bool `json:"collect_user_agent"`
Timestamps string `json:"timestamps"`
ZeroTrust bool `json:"zero_trust"`
Accounts string `json:"accounts"`
ProofOfPoss bool `json:"proof_of_possession"`
}
resp := policy{
StoresPII: false,
CollectIP: false,
CollectUA: false,
Timestamps: map[bool]string{true: "coarse_utc", false: "utc"}[s.coarseTS],
ZeroTrust: s.zeroTrust,
StoresPII: false,
CollectIP: false,
CollectUA: false,
Timestamps: map[bool]string{true: "coarse_utc", false: "utc"}[s.coarseTS],
ZeroTrust: s.zeroTrust,
Accounts: "public-key only",
ProofOfPoss: s.requirePoP,
}
_ = json.NewEncoder(w).Encode(resp)
}
// -----------------------------
// Auth helpers
// -----------------------------
// ---------- Auth helpers ----------
func (s *Server) requireAuth(w http.ResponseWriter, r *http.Request) bool {
// Developer bypass
type authCtx struct {
sub string
cnf string // "p256:<b64raw>" or "ed25519:<b64raw>"
}
// parseAuth authenticates the request and returns the caller's identity.
// On failure it writes a 401 response itself and returns (nil, false), so
// callers only need to check the boolean. Accepted credentials, in order:
//  1. dev bypass when the server runs with allowUnauth;
//  2. the configured static dev bearer token;
//  3. a "Bearer gc2.*" HMAC token verified against s.signingKey, whose
//     claims carry the subject and the confirmed device key (cnf).
func (s *Server) parseAuth(w http.ResponseWriter, r *http.Request) (*authCtx, bool) {
	// Dev bypass
	if s.allowUnauth {
		return &authCtx{sub: "dev"}, true
	}
	// Dev bearer
	if s.devBearer != "" && r.Header.Get("Authorization") == "Bearer "+s.devBearer {
		return &authCtx{sub: "dev"}, true
	}
	h := r.Header.Get("Authorization")
	if h == "" {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return nil, false
	}
	// gc2 HMAC token; only attempted when a signing key is configured.
	if strings.HasPrefix(h, "Bearer gc2.") && len(s.signingKey) != 0 {
		claims, err := auth.VerifyGC2(s.signingKey, strings.TrimPrefix(h, "Bearer "), time.Now())
		if err != nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return nil, false
		}
		return &authCtx{sub: claims.Sub, cnf: claims.CNF}, true
	}
	http.Error(w, "unauthorized", http.StatusUnauthorized)
	return nil, false
}
func (s *Server) verifyPoP(w http.ResponseWriter, r *http.Request, ac *authCtx, body []byte) bool {
if !s.requirePoP {
return true
}
// Optional dev bearer
if s.devBearer != "" {
h := r.Header.Get("Authorization")
if h == "Bearer "+s.devBearer {
return true
}
pubHdr := r.Header.Get("X-GC-Key")
ts := r.Header.Get("X-GC-TS")
proof := r.Header.Get("X-GC-Proof")
if pubHdr == "" || ts == "" || proof == "" {
http.Error(w, "missing proof", http.StatusUnauthorized)
return false
}
// timestamp window
sec, _ := strconv.ParseInt(ts, 10, 64)
d := time.Since(time.Unix(sec, 0))
if d < -5*time.Minute || d > 5*time.Minute {
http.Error(w, "stale proof", http.StatusUnauthorized)
return false
}
// cnf must match
if ac.cnf == "" || ac.cnf != pubHdr {
http.Error(w, "key mismatch", http.StatusUnauthorized)
return false
}
// build message
sum := sha256.Sum256(body)
msg := strings.ToUpper(r.Method) + "\n" + r.URL.String() + "\n" + ts + "\n" + hex.EncodeToString(sum[:])
// Accept self-signed HMAC tokens if configured
if s.allowClientSignedTokens && len(s.signingKey) > 0 {
h := r.Header.Get("Authorization")
if strings.HasPrefix(h, "Bearer ") {
tok := strings.TrimSpace(strings.TrimPrefix(h, "Bearer "))
if s.verifyToken(tok) == nil {
return true
// verify signature
ok := false
switch {
case strings.HasPrefix(pubHdr, "ed25519:"):
raw, err := base64.RawURLEncoding.DecodeString(strings.TrimPrefix(pubHdr, "ed25519:"))
if err == nil {
sig, err := base64.RawURLEncoding.DecodeString(proof)
if err == nil && len(raw) == ed25519.PublicKeySize {
ok = ed25519.Verify(ed25519.PublicKey(raw), []byte(msg), sig)
}
}
case strings.HasPrefix(pubHdr, "p256:"):
raw, err := base64.RawURLEncoding.DecodeString(strings.TrimPrefix(pubHdr, "p256:"))
if err == nil && len(raw) == 65 && raw[0] == 0x04 {
x := new(big.Int).SetBytes(raw[1:33])
y := new(big.Int).SetBytes(raw[33:65])
pk := ecdsa.PublicKey{Curve: elliptic.P256(), X: x, Y: y}
der, err := base64.RawURLEncoding.DecodeString(proof)
if err == nil {
ok = ecdsa.VerifyASN1(&pk, []byte(msg), der)
}
}
}
http.Error(w, "unauthorized", http.StatusUnauthorized)
return false
if !ok {
http.Error(w, "bad proof", http.StatusUnauthorized)
return false
}
// replay cache
h := sha256.Sum256([]byte(proof + "|" + ts))
key := base64.RawURLEncoding.EncodeToString(h[:])
s.replayMu.Lock()
defer s.replayMu.Unlock()
if exp, exists := s.replays[key]; exists && time.Now().Before(exp) {
http.Error(w, "replay", http.StatusUnauthorized)
return false
}
s.replays[key] = time.Now().Add(10 * time.Minute)
return true
}
func (s *Server) makeToken(subject string, ttl time.Duration) (string, error) {
if len(s.signingKey) == 0 {
return "", errors.New("signing key not set")
// ---------- Public-key auth: challenge/verify ----------
func (s *Server) handleKeyChallenge(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
return
}
type claims struct {
Sub string `json:"sub"`
Exp int64 `json:"exp"`
Iss string `json:"iss"`
}
c := claims{
Sub: subject,
Exp: time.Now().Add(ttl).Unix(),
Iss: "greencoast",
}
body, _ := json.Marshal(c)
mac := hmac.New(sha256.New, s.signingKey)
mac.Write(body)
sig := mac.Sum(nil)
return "gc1." + base64.RawURLEncoding.EncodeToString(body) + "." + base64.RawURLEncoding.EncodeToString(sig), nil
nonce := s.randToken(16)
exp := time.Now().Add(10 * time.Minute)
s.nonceMu.Lock()
s.nonceExpiry[nonce] = exp
s.nonceMu.Unlock()
_ = json.NewEncoder(w).Encode(map[string]any{"nonce": nonce, "exp": exp.Unix()})
}
func (s *Server) verifyToken(tok string) error {
if !strings.HasPrefix(tok, "gc1.") {
return errors.New("bad prefix")
// keyVerifyReq is the JSON request body for the key-verify step: the
// client proves possession of a public key by signing the server-issued
// nonce (message "key-verify\n"+nonce) with the matching private key.
type keyVerifyReq struct {
	Nonce string `json:"nonce"`
	Alg   string `json:"alg"` // "p256" or "ed25519"
	Pub   string `json:"pub"` // base64(raw) for that alg (p256 uncompressed point 65B; ed25519 32B)
	Sig   string `json:"sig"` // base64(signature over "key-verify\n"+nonce)
}
func (s *Server) handleKeyVerify(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
return
}
parts := strings.Split(tok, ".")
if len(parts) != 3 {
return errors.New("bad parts")
var req keyVerifyReq
if err := json.NewDecoder(r.Body).Decode(&req); err != nil || req.Nonce == "" || req.Alg == "" || req.Pub == "" || req.Sig == "" {
http.Error(w, "bad request", http.StatusBadRequest)
return
}
body, err := base64.RawURLEncoding.DecodeString(parts[1])
// check nonce
s.nonceMu.Lock()
exp, ok := s.nonceExpiry[req.Nonce]
if ok {
delete(s.nonceExpiry, req.Nonce)
}
s.nonceMu.Unlock()
if !ok || time.Now().After(exp) {
http.Error(w, "nonce invalid", http.StatusUnauthorized)
return
}
msg := "key-verify\n" + req.Nonce
pubRaw, err := base64.RawURLEncoding.DecodeString(req.Pub)
if err != nil {
return err
http.Error(w, "bad pub", http.StatusBadRequest)
return
}
want, err := base64.RawURLEncoding.DecodeString(parts[2])
sigRaw, err := base64.RawURLEncoding.DecodeString(req.Sig)
if err != nil {
return err
http.Error(w, "bad sig", http.StatusBadRequest)
return
}
mac := hmac.New(sha256.New, s.signingKey)
mac.Write(body)
if !hmac.Equal(want, mac.Sum(nil)) {
return errors.New("bad sig")
var cnf string
switch strings.ToLower(req.Alg) {
case "ed25519":
if len(pubRaw) != ed25519.PublicKeySize || len(sigRaw) != ed25519.SignatureSize {
http.Error(w, "bad key", http.StatusBadRequest)
return
}
if !ed25519.Verify(ed25519.PublicKey(pubRaw), []byte(msg), sigRaw) {
http.Error(w, "verify failed", http.StatusUnauthorized)
return
}
cnf = "ed25519:" + req.Pub
case "p256":
if len(pubRaw) != 65 || pubRaw[0] != 0x04 {
http.Error(w, "bad key", http.StatusBadRequest)
return
}
x := new(big.Int).SetBytes(pubRaw[1:33])
y := new(big.Int).SetBytes(pubRaw[33:65])
pk := ecdsa.PublicKey{Curve: elliptic.P256(), X: x, Y: y}
// sigRaw assumed DER (WebCrypto)
if !ecdsa.VerifyASN1(&pk, []byte(msg), sigRaw) {
http.Error(w, "verify failed", http.StatusUnauthorized)
return
}
cnf = "p256:" + req.Pub
default:
http.Error(w, "unsupported alg", http.StatusBadRequest)
return
}
var c struct {
Sub string `json:"sub"`
Exp int64 `json:"exp"`
sub := auth.AccountIDFromPub(pubRaw)
ttl := 8 * time.Hour
now := time.Now()
bearer, err := auth.MintGC2(s.signingKey, auth.Claims{
Sub: sub, Exp: now.Add(ttl).Unix(), Nbf: now.Add(-60 * time.Second).Unix(),
Iss: "greencoast", Aud: "api", CNF: cnf,
})
if err != nil {
http.Error(w, "sign error", http.StatusInternalServerError)
return
}
if err := json.Unmarshal(body, &c); err != nil {
return err
}
if time.Now().Unix() > c.Exp {
return errors.New("expired")
}
return nil
_ = json.NewEncoder(w).Encode(map[string]any{
"bearer": bearer,
"sub": sub,
"exp": now.Add(ttl).Unix(),
})
}
// -----------------------------
// Objects & Index
// -----------------------------
// ---------- Objects & Index ----------
func (s *Server) handlePutObject(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPut {
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
return
}
if !s.requireAuth(w, r) {
// Limit body to 10 MiB by default
const maxBlob = int64(10 << 20)
r.Body = http.MaxBytesReader(w, r.Body, maxBlob)
// Read body first to support PoP over body hash
var buf bytes.Buffer
n, err := io.Copy(&buf, r.Body)
if err != nil {
http.Error(w, "read error", 500)
return
}
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
if !s.verifyPoP(w, r, ac, buf.Bytes()) {
return
}
@@ -324,23 +459,14 @@ func (s *Server) handlePutObject(w http.ResponseWriter, r *http.Request) {
creatorTZ = ""
}
// Write to store; compute hash while streaming
var buf bytes.Buffer
n, err := io.Copy(&buf, r.Body)
if err != nil {
http.Error(w, "read error", 500)
return
}
sum := sha256.Sum256(buf.Bytes())
hash := hex.EncodeToString(sum[:])
// Persist
if err := s.store.Put(hash, bytes.NewReader(buf.Bytes())); err != nil {
http.Error(w, "store error", 500)
return
}
// Index
when := time.Now().UTC()
if s.coarseTS {
when = when.Truncate(time.Minute)
@@ -356,14 +482,13 @@ func (s *Server) handlePutObject(w http.ResponseWriter, r *http.Request) {
http.Error(w, "index error", 500)
return
}
s.sseBroadcast(map[string]interface{}{"event": "put", "data": entry})
s.sseBroadcast(map[string]any{"event": "put", "data": entry})
w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(entry)
}
func (s *Server) handleObjectByHash(w http.ResponseWriter, r *http.Request) {
// path: /v1/object/{hash}
parts := strings.Split(strings.TrimPrefix(r.URL.Path, "/v1/object/"), "/")
if len(parts) == 0 || parts[0] == "" {
http.NotFound(w, r)
@@ -373,7 +498,11 @@ func (s *Server) handleObjectByHash(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case http.MethodGet:
if !s.requireAuth(w, r) {
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
if !s.verifyPoP(w, r, ac, nil) {
return
}
rc, n, err := s.store.Get(hash)
@@ -389,16 +518,19 @@ func (s *Server) handleObjectByHash(w http.ResponseWriter, r *http.Request) {
_, _ = io.Copy(w, rc)
case http.MethodDelete:
if !s.requireAuth(w, r) {
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
if !s.verifyPoP(w, r, ac, nil) {
return
}
if err := s.store.Delete(hash); err != nil {
http.Error(w, "delete error", 500)
return
}
// prune index if present
_ = s.idx.Delete(hash)
s.sseBroadcast(map[string]interface{}{"event": "delete", "data": map[string]string{"hash": hash}})
s.sseBroadcast(map[string]any{"event": "delete", "data": map[string]string{"hash": hash}})
w.WriteHeader(http.StatusNoContent)
default:
@@ -411,7 +543,11 @@ func (s *Server) handleIndex(w http.ResponseWriter, r *http.Request) {
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
return
}
if !s.requireAuth(w, r) {
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
if !s.verifyPoP(w, r, ac, nil) {
return
}
items, err := s.idx.List()
@@ -423,13 +559,16 @@ func (s *Server) handleIndex(w http.ResponseWriter, r *http.Request) {
_ = json.NewEncoder(w).Encode(items)
}
// Simple in-process SSE fanout.
func (s *Server) handleIndexSSE(w http.ResponseWriter, r *http.Request) {
if !s.requireAuth(w, r) {
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
flusher, ok := w.(http.Flusher)
if !ok {
if !s.verifyPoP(w, r, ac, nil) {
return
}
flusher, ok2 := w.(http.Flusher)
if !ok2 {
http.Error(w, "stream unsupported", http.StatusInternalServerError)
return
}
@@ -438,8 +577,6 @@ func (s *Server) handleIndexSSE(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Connection", "keep-alive")
ch := make(chan []byte, 8)
// subscribe
s.sseMu.Lock()
if s.sseClosed {
s.sseMu.Unlock()
@@ -449,11 +586,9 @@ func (s *Server) handleIndexSSE(w http.ResponseWriter, r *http.Request) {
s.sseSubs[ch] = struct{}{}
s.sseMu.Unlock()
// Send a hello/heartbeat
fmt.Fprintf(w, "data: %s\n\n", `{"event":"hello","data":"ok"}`)
flusher.Flush()
// pump
ctx := r.Context()
t := time.NewTicker(25 * time.Second)
defer t.Stop()
@@ -492,24 +627,25 @@ func (s *Server) sseBroadcast(v interface{}) {
s.sseMu.Unlock()
}
// -----------------------------
// Admin: reindex from disk
// -----------------------------
// ---------- Admin: reindex ----------
func (s *Server) handleAdminReindex(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
return
}
if !s.requireAuth(w, r) {
ac, ok := s.parseAuth(w, r)
if !ok {
return
}
walker, ok := s.store.(blobWalker)
if !ok {
if !s.verifyPoP(w, r, ac, nil) {
return
}
walker, ok2 := s.store.(blobWalker)
if !ok2 {
http.Error(w, "store does not support walk", http.StatusNotImplemented)
return
}
count := 0
err := walker.Walk(func(hash string, size int64, mod time.Time) error {
count++
@@ -532,29 +668,45 @@ func (s *Server) handleAdminReindex(w http.ResponseWriter, r *http.Request) {
})
}
// -----------------------------
// Discord SSO (server-side code flow)
// -----------------------------
// ---------- Discord SSO with PKCE + device key binding ----------
func (s *Server) handleDiscordStart(w http.ResponseWriter, r *http.Request, cfg DiscordProvider) {
if !cfg.Enabled || cfg.ClientID == "" || cfg.ClientSecret == "" || cfg.RedirectURI == "" {
http.Error(w, "discord sso disabled", http.StatusBadRequest)
return
}
// Require explicit 3P assent (UI shows disclaimer)
if r.Header.Get("X-GC-3P-Assent") != "1" {
http.Error(w, "third-party provider not assented", http.StatusForbidden)
return
}
deviceKey := strings.TrimSpace(r.Header.Get("X-GC-Key"))
if deviceKey == "" {
http.Error(w, "device key required", http.StatusBadRequest)
return
}
// PKCE
verifier := s.randToken(32)
chalSum := sha256.Sum256([]byte(verifier))
challenge := base64.RawURLEncoding.EncodeToString(chalSum[:])
state := s.randToken(16)
s.stateMu.Lock()
s.states[state] = stateItem{
Exp: time.Now().Add(10 * time.Minute),
Verifier: verifier,
DeviceKey: deviceKey,
}
s.stateMu.Unlock()
state := s.newState(5 * time.Minute)
v := url.Values{}
v.Set("response_type", "code")
v.Set("client_id", cfg.ClientID)
v.Set("redirect_uri", cfg.RedirectURI)
v.Set("scope", "identify")
v.Set("prompt", "consent")
v.Set("state", state)
v.Set("code_challenge", challenge)
v.Set("code_challenge_method", "S256")
authURL := (&url.URL{
Scheme: "https",
Host: "discord.com",
@@ -571,22 +723,31 @@ func (s *Server) handleDiscordCallback(w http.ResponseWriter, r *http.Request, c
http.Error(w, "disabled", http.StatusBadRequest)
return
}
q := r.URL.Query()
code := q.Get("code")
state := q.Get("state")
if code == "" || state == "" || !s.consumeState(state) {
if code == "" || state == "" {
http.Error(w, "invalid state/code", http.StatusBadRequest)
return
}
// Exchange code for token
s.stateMu.Lock()
item, ok := s.states[state]
if ok && time.Now().Before(item.Exp) {
delete(s.states, state)
}
s.stateMu.Unlock()
if !ok {
http.Error(w, "state expired", http.StatusBadRequest)
return
}
// Exchange code for token (with verifier)
form := url.Values{}
form.Set("client_id", cfg.ClientID)
form.Set("client_secret", cfg.ClientSecret)
form.Set("grant_type", "authorization_code")
form.Set("code", code)
form.Set("redirect_uri", cfg.RedirectURI)
form.Set("code_verifier", item.Verifier)
req, _ := http.NewRequestWithContext(r.Context(), http.MethodPost, "https://discord.com/api/oauth2/token", strings.NewReader(form.Encode()))
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
@@ -604,15 +765,13 @@ func (s *Server) handleDiscordCallback(w http.ResponseWriter, r *http.Request, c
var tok struct {
AccessToken string `json:"access_token"`
TokenType string `json:"token_type"`
Scope string `json:"scope"`
ExpiresIn int64 `json:"expires_in"`
}
if err := json.NewDecoder(res.Body).Decode(&tok); err != nil {
http.Error(w, "token decode failed", 502)
return
}
// Fetch user id (identify scope)
// Fetch user id
ureq, _ := http.NewRequestWithContext(r.Context(), http.MethodGet, "https://discord.com/api/users/@me", nil)
ureq.Header.Set("Authorization", tok.TokenType+" "+tok.AccessToken)
ures, err := http.DefaultClient.Do(ureq)
@@ -627,54 +786,31 @@ func (s *Server) handleDiscordCallback(w http.ResponseWriter, r *http.Request, c
return
}
var user struct {
ID string `json:"id"`
Username string `json:"username"`
ID string `json:"id"`
}
if err := json.NewDecoder(ures.Body).Decode(&user); err != nil {
http.Error(w, "user decode failed", 502)
return
}
// Mint self-signed bearer with Discord snowflake as subject
bearer, err := s.makeToken("discord:"+user.ID, time.Hour*8)
// Bind token to device key from /start
ttl := 8 * time.Hour
now := time.Now()
sub := "discord:" + user.ID
bearer, err := auth.MintGC2(s.signingKey, auth.Claims{
Sub: sub, Exp: now.Add(ttl).Unix(), Nbf: now.Add(-60 * time.Second).Unix(),
Iss: "greencoast", Aud: "api", CNF: item.DeviceKey,
})
if err != nil {
http.Error(w, "signing error", 500)
http.Error(w, "sign error", 500)
return
}
// Redirect to frontend callback with bearer in fragment (not query)
target := cfg.RedirectURI
u, _ := url.Parse(target)
u, _ := url.Parse(cfg.RedirectURI)
u.Fragment = "bearer=" + url.QueryEscape(bearer) + "&next=/"
http.Redirect(w, r, u.String(), http.StatusFound)
}
// simple in-memory state store
// newState mints an opaque OAuth state value and records its expiry
// under stateMu. The value is later checked and removed by consumeState.
//
// NOTE(review): the value is derived from the wall clock only (hex of
// UnixNano copied into 12 bytes), so it is predictable and can collide.
// This should use crypto/rand before being relied on for CSRF protection
// of the OAuth flow — confirm and switch.
func (s *Server) newState(ttl time.Duration) string {
	s.stateMu.Lock()
	defer s.stateMu.Unlock()
	b := make([]byte, 12)
	now := time.Now().UnixNano()
	// copy truncates the hex timestamp to 12 bytes; any remainder is dropped.
	copy(b, []byte(fmt.Sprintf("%x", now)))
	val := base64.RawURLEncoding.EncodeToString(b)
	s.states[val] = time.Now().Add(ttl)
	return val
}
// consumeState atomically removes an OAuth state value and reports
// whether it existed and had not yet expired. States are single-use:
// the entry is deleted even when it turns out to be expired.
func (s *Server) consumeState(v string) bool {
	s.stateMu.Lock()
	defer s.stateMu.Unlock()
	if deadline, found := s.states[v]; found {
		delete(s.states, v)
		return time.Now().Before(deadline)
	}
	return false
}
// -----------------------------
// Utilities
// -----------------------------
// ---------- Utilities, shutdown ----------
func isReasonableTZ(tz string) bool {
if !strings.Contains(tz, "/") || len(tz) > 64 {
@@ -688,10 +824,6 @@ func isReasonableTZ(tz string) bool {
return true
}
// -----------------------------
// Optional: graceful shutdown
// -----------------------------
func (s *Server) Shutdown(ctx context.Context) error {
s.sseMu.Lock()
s.sseClosed = true
@@ -703,20 +835,12 @@ func (s *Server) Shutdown(ctx context.Context) error {
return nil
}
// -----------------------------
// Helpers for static serving (optional use)
// -----------------------------
func fileExists(p string) bool {
st, err := os.Stat(p)
return err == nil && !st.IsDir()
}
func joinClean(dir, p string) (string, bool) {
fp := path.Clean("/" + p)
full := path.Clean(dir + fp)
if !strings.HasPrefix(full, path.Clean(dir)) {
return "", false
}
return full, true
// randToken returns an opaque token: the first n bytes of an HMAC-SHA256
// digest, base64url-encoded. Used for OAuth state and auth nonces.
//
// NOTE(review): the digest input is wall-clock time plus current map
// sizes, keyed by the server pointer's address — deterministic and
// guessable by an attacker who can estimate process start time. These
// tokens gate CSRF state and key-verify nonces, so this should move to
// crypto/rand — confirm and switch.
// NOTE(review): n must be <= 32 (sha256 digest length) or sum[:n]
// panics; visible callers pass 16 and 32.
func (s *Server) randToken(n int) string {
	// HMAC over time + counter to avoid importing crypto/rand; good enough for state/nonce
	// (If you prefer, switch to crypto/rand.)
	b := []byte(fmt.Sprintf("%d|%d", time.Now().UnixNano(), len(s.states)+len(s.nonceExpiry)))
	m := hmac.New(sha256.New, []byte(fmt.Sprintf("%p", s)))
	m.Write(b)
	sum := m.Sum(nil)
	return base64.RawURLEncoding.EncodeToString(sum[:n])
}

78
internal/auth/gc2.go Normal file
View File

@@ -0,0 +1,78 @@
package auth
import (
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"strings"
"time"
)
// Claims is the JSON payload of a "gc2" bearer token. The whole struct is
// serialized, HMAC-SHA256-signed by MintGC2, and checked by VerifyGC2
// (signature first, then Nbf/Exp against the supplied time).
type Claims struct {
	Sub string `json:"sub"` // account ID (acc_…)
	Exp int64  `json:"exp"` // unix seconds
	Nbf int64  `json:"nbf,omitempty"` // not before
	Iss string `json:"iss,omitempty"` // greencoast
	Aud string `json:"aud,omitempty"` // api
	Jti string `json:"jti,omitempty"` // token id (optional)
	CNF string `json:"cnf,omitempty"` // key binding: "p256:<b64raw>" or "ed25519:<b64raw>"
}
// MintGC2 serializes claims as JSON and signs them with HMAC-SHA256 under
// signKey, producing a token of the form
// "gc2.<base64url(body)>.<base64url(sig)>" (unpadded, RawURLEncoding).
//
// Returns an error when the signing key is empty, the claims lack a
// subject or expiry, or the claims cannot be serialized. (The original
// discarded json.Marshal's error; it is now propagated so a failed
// serialization can never yield a signed token over garbage.)
func MintGC2(signKey []byte, c Claims) (string, error) {
	if len(signKey) == 0 {
		return "", errors.New("sign key missing")
	}
	if c.Sub == "" || c.Exp == 0 {
		return "", errors.New("claims incomplete")
	}
	body, err := json.Marshal(c)
	if err != nil {
		return "", err
	}
	mac := hmac.New(sha256.New, signKey)
	mac.Write(body)
	sig := mac.Sum(nil)
	return "gc2." + base64.RawURLEncoding.EncodeToString(body) + "." + base64.RawURLEncoding.EncodeToString(sig), nil
}
// VerifyGC2 checks a "gc2" bearer token: structure, HMAC-SHA256 signature
// (constant-time compare), then the Nbf/Exp claims against now. It
// returns the decoded claims on success and the zero Claims on any
// failure.
func VerifyGC2(signKey []byte, tok string, now time.Time) (Claims, error) {
	var none Claims
	if !strings.HasPrefix(tok, "gc2.") {
		return none, errors.New("bad prefix")
	}
	segs := strings.Split(tok, ".")
	if len(segs) != 3 {
		return none, errors.New("bad parts")
	}
	payload, err := base64.RawURLEncoding.DecodeString(segs[1])
	if err != nil {
		return none, err
	}
	sig, err := base64.RawURLEncoding.DecodeString(segs[2])
	if err != nil {
		return none, err
	}
	// Signature before claims: never trust payload contents until the
	// MAC checks out.
	m := hmac.New(sha256.New, signKey)
	m.Write(payload)
	if !hmac.Equal(sig, m.Sum(nil)) {
		return none, errors.New("bad sig")
	}
	var claims Claims
	if err := json.Unmarshal(payload, &claims); err != nil {
		return none, err
	}
	unix := now.Unix()
	if claims.Nbf != 0 && unix < claims.Nbf {
		return none, errors.New("nbf")
	}
	if unix > claims.Exp {
		return none, errors.New("expired")
	}
	return claims, nil
}
// AccountIDFromPub derives a stable account identifier from a raw public
// key: "acc_" followed by the first 16 bytes (32 hex chars) of
// SHA-256(raw).
func AccountIDFromPub(raw []byte) string {
	digest := sha256.Sum256(raw)
	return "acc_" + hex.EncodeToString(digest[:16])
}

View File

@@ -6,17 +6,14 @@ import (
"time"
)
// Entry is the API/JSON shape the server returns.
// StoredAt is RFC3339/RFC3339Nano in UTC.
type Entry struct {
Hash string `json:"hash"`
Bytes int64 `json:"bytes"`
StoredAt string `json:"stored_at"` // RFC3339( Nano ) string
StoredAt string `json:"stored_at"`
Private bool `json:"private"`
CreatorTZ string `json:"creator_tz,omitempty"` // IANA TZ like "America/New_York"
CreatorTZ string `json:"creator_tz,omitempty"`
}
// internal record with real time.Time for sorting/comparison.
type rec struct {
Hash string
Bytes int64
@@ -25,30 +22,20 @@ type rec struct {
CreatorTZ string
}
// Index is an in-memory index keyed by hash.
type Index struct {
mu sync.RWMutex
hash map[string]rec
}
// New creates an empty Index.
func New() *Index {
return &Index{
hash: make(map[string]rec),
}
}
// New creates an empty Index ready for Put/Delete/List.
func New() *Index {
	return &Index{
		hash: make(map[string]rec),
	}
}
// Put inserts or replaces an entry.
// e.StoredAt may be RFC3339( Nano ); if empty/invalid we use time.Now().UTC().
func (ix *Index) Put(e Entry) error {
ix.mu.Lock()
defer ix.mu.Unlock()
t := parseWhen(e.StoredAt)
if t.IsZero() {
t = time.Now().UTC()
}
ix.hash[e.Hash] = rec{
Hash: e.Hash,
Bytes: e.Bytes,
@@ -59,7 +46,6 @@ func (ix *Index) Put(e Entry) error {
return nil
}
// Delete removes an entry by hash (no error if absent).
func (ix *Index) Delete(hash string) error {
ix.mu.Lock()
defer ix.mu.Unlock()
@@ -67,19 +53,14 @@ func (ix *Index) Delete(hash string) error {
return nil
}
// List returns entries sorted by StoredAt descending.
func (ix *Index) List() ([]Entry, error) {
ix.mu.RLock()
defer ix.mu.RUnlock()
tmp := make([]rec, 0, len(ix.hash))
for _, r := range ix.hash {
tmp = append(tmp, r)
}
sort.Slice(tmp, func(i, j int) bool {
return tmp[i].StoredAt.After(tmp[j].StoredAt)
})
sort.Slice(tmp, func(i, j int) bool { return tmp[i].StoredAt.After(tmp[j].StoredAt) })
out := make([]Entry, len(tmp))
for i, r := range tmp {
out[i] = Entry{
@@ -93,7 +74,6 @@ func (ix *Index) List() ([]Entry, error) {
return out, nil
}
// parseWhen tries RFC3339Nano then RFC3339; returns zero time on failure.
func parseWhen(s string) time.Time {
if s == "" {
return time.Time{}

View File

@@ -10,15 +10,11 @@ import (
"time"
)
// FSStore stores blobs on the local filesystem under root/objects/...
// It supports both a flat layout (objects/<hash>) and a nested layout
// (objects/<hash>/<file> or objects/<prefix>/<hash>).
type FSStore struct {
root string
objects string
}
// NewFS returns a file-backed blob store rooted at dir.
func NewFS(dir string) (*FSStore, error) {
if dir == "" {
return nil, errors.New("empty storage dir")
@@ -30,7 +26,6 @@ func NewFS(dir string) (*FSStore, error) {
return &FSStore{root: dir, objects: o}, nil
}
// pathFlat returns the flat path objects/<hash>.
func (s *FSStore) pathFlat(hash string) (string, error) {
if hash == "" {
return "", errors.New("empty hash")
@@ -38,7 +33,6 @@ func (s *FSStore) pathFlat(hash string) (string, error) {
return filepath.Join(s.objects, hash), nil
}
// isHexHash does a quick check for lowercase hex of length 64.
func isHexHash(name string) bool {
if len(name) != 64 {
return false
@@ -52,27 +46,14 @@ func isHexHash(name string) bool {
return true
}
// findBlobPath tries common layouts before falling back to a recursive search.
//
// Supported fast paths (in order):
// 1. objects/<hash> (flat file)
// 2. objects/<hash>/blob|data|content (common names)
// 3. objects/<hash>/<single file> (folder-per-post; pick that file)
// 4. objects/<hash[0:2]>/<hash> (two-level prefix sharding)
//
// If still not found, it walks recursively under objects/ to locate either:
// - a file named exactly <hash>, or
// - any file under a directory named <hash> (choose the most recently modified).
func (s *FSStore) findBlobPath(hash string) (string, error) {
if hash == "" {
return "", errors.New("empty hash")
}
// 1) flat file
// 1) flat
if p, _ := s.pathFlat(hash); fileExists(p) {
return p, nil
}
// 2) objects/<hash>/{blob,data,content}
dir := filepath.Join(s.objects, hash)
for _, cand := range []string{"blob", "data", "content"} {
@@ -81,88 +62,67 @@ func (s *FSStore) findBlobPath(hash string) (string, error) {
return p, nil
}
}
// 3) objects/<hash>/<single file>
if st, err := os.Stat(dir); err == nil && st.IsDir() {
ents, err := os.ReadDir(dir)
if err == nil {
var picked string
var pickedMod time.Time
for _, de := range ents {
if de.IsDir() {
continue
}
p := filepath.Join(dir, de.Name())
fi, err := os.Stat(p)
if err != nil || !fi.Mode().IsRegular() {
continue
}
// Pick newest file if multiple.
if picked == "" || fi.ModTime().After(pickedMod) {
picked = p
pickedMod = fi.ModTime()
}
ents, _ := os.ReadDir(dir)
var picked string
var pickedMod time.Time
for _, de := range ents {
if de.IsDir() {
continue
}
if picked != "" {
return picked, nil
p := filepath.Join(dir, de.Name())
fi, err := os.Stat(p)
if err == nil && fi.Mode().IsRegular() {
if picked == "" || fi.ModTime().After(pickedMod) {
picked, pickedMod = p, fi.ModTime()
}
}
}
if picked != "" {
return picked, nil
}
}
// 4) two-level prefix: objects/aa/<hash>
// 4) two-level prefix objects/aa/<hash>
if len(hash) >= 2 {
p := filepath.Join(s.objects, hash[:2], hash)
if fileExists(p) {
return p, nil
}
}
// Fallback: recursive search
// 5) recursive search
var best string
var bestMod time.Time
err := filepath.WalkDir(s.objects, func(p string, d fs.DirEntry, err error) error {
if err != nil {
// ignore per-entry errors
return nil
}
if d.IsDir() {
_ = filepath.WalkDir(s.objects, func(p string, d fs.DirEntry, err error) error {
if err != nil || d.IsDir() {
return nil
}
base := filepath.Base(p)
// Exact filename == hash
if base == hash {
best = p
// exact match is good enough; stop here
return fs.SkipDir
}
// If parent dir name is hash, consider it
parent := filepath.Base(filepath.Dir(p))
if parent == hash {
if fi, err := os.Stat(p); err == nil && fi.Mode().IsRegular() {
if best == "" || fi.ModTime().After(bestMod) {
best = p
bestMod = fi.ModTime()
best, bestMod = p, fi.ModTime()
}
}
}
return nil
})
if err == nil && best != "" {
if best != "" {
return best, nil
}
return "", os.ErrNotExist
}
// fileExists true if path exists and is a regular file.
func fileExists(p string) bool {
fi, err := os.Stat(p)
return err == nil && fi.Mode().IsRegular()
}
// Put writes/overwrites the blob at the content hash into the flat path.
// (Nested layouts remain supported for reads/reindex, but new writes are flat.)
func (s *FSStore) Put(hash string, r io.Reader) error {
p, err := s.pathFlat(hash)
if err != nil {
@@ -189,7 +149,6 @@ func (s *FSStore) Put(hash string, r io.Reader) error {
return os.Rename(tmp, p)
}
// Get opens the blob for reading and returns its size if known.
func (s *FSStore) Get(hash string) (io.ReadCloser, int64, error) {
p, err := s.findBlobPath(hash)
if err != nil {
@@ -206,17 +165,12 @@ func (s *FSStore) Get(hash string) (io.ReadCloser, int64, error) {
return f, st.Size(), nil
}
// Delete removes the blob. It is not an error if it doesn't exist.
// It tries the flat path, common nested paths, then falls back to remove
// any file found via findBlobPath.
func (s *FSStore) Delete(hash string) error {
// Try flat
if p, _ := s.pathFlat(hash); fileExists(p) {
if err := os.Remove(p); err == nil || errors.Is(err, os.ErrNotExist) {
return nil
}
}
// Try common nested
dir := filepath.Join(s.objects, hash)
for _, cand := range []string{"blob", "data", "content"} {
p := filepath.Join(dir, cand)
@@ -234,77 +188,49 @@ func (s *FSStore) Delete(hash string) error {
}
}
}
// Fallback: whatever findBlobPath locates
if p, err := s.findBlobPath(hash); err == nil {
if err := os.Remove(p); err == nil || errors.Is(err, os.ErrNotExist) {
return nil
}
}
// If we couldn't find anything, treat as success (idempotent delete)
return nil
}
// Walk calls fn(hash, size, modTime) for each blob file found.
// It recognizes blobs when either:
// - the file name is a 64-char hex hash, or
// - the parent directory name is that hash (folder-per-post).
//
// If multiple files map to the same hash (e.g., dir contains many files),
// the newest file's size/modTime is reported.
func (s *FSStore) Walk(fn func(hash string, size int64, mod time.Time) error) error {
type rec struct {
size int64
mod time.Time
}
agg := make(map[string]rec)
err := filepath.WalkDir(s.objects, func(p string, d fs.DirEntry, err error) error {
if err != nil {
return nil // skip unreadable entries
}
if d.IsDir() {
_ = filepath.WalkDir(s.objects, func(p string, d fs.DirEntry, err error) error {
if err != nil || d.IsDir() {
return nil
}
// Only consider regular files
fi, err := os.Stat(p)
if err != nil || !fi.Mode().IsRegular() {
return nil
}
base := filepath.Base(p)
// Case 1: filename equals hash
if isHexHash(base) {
if r, ok := agg[base]; !ok || fi.ModTime().After(r.mod) {
agg[base] = rec{size: fi.Size(), mod: fi.ModTime()}
agg[base] = rec{fi.Size(), fi.ModTime()}
}
return nil
}
// Case 2: parent dir is the hash
parent := filepath.Base(filepath.Dir(p))
if isHexHash(parent) {
if r, ok := agg[parent]; !ok || fi.ModTime().After(r.mod) {
agg[parent] = rec{size: fi.Size(), mod: fi.ModTime()}
agg[parent] = rec{fi.Size(), fi.ModTime()}
}
return nil
}
// Case 3: two-level prefix layout e.g. objects/aa/<hash>
// If parent is a 2-char dir and grandparent is objects/, base might be hash.
if len(base) == 64 && isHexHash(strings.ToLower(base)) {
// already handled as Case 1, but keep as safety if different casing sneaks in
if r, ok := agg[base]; !ok || fi.ModTime().After(r.mod) {
agg[base] = rec{size: fi.Size(), mod: fi.ModTime()}
agg[base] = rec{fi.Size(), fi.ModTime()}
}
return nil
}
return nil
})
if err != nil {
return err
}
for h, r := range agg {
if err := fn(h, r.size, r.mod); err != nil {
return err