aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFuwn <[email protected]>2026-02-26 20:20:09 -0800
committerFuwn <[email protected]>2026-02-26 20:20:09 -0800
commit80dc07bf7bbf3ee9f7191a0446199d74cbb2d341 (patch)
tree746f1518723b64a5270131a1e19b6c6f5a312926
parentchore: remove accidentally committed binary (diff)
downloadplutia-test-80dc07bf7bbf3ee9f7191a0446199d74cbb2d341.tar.xz
plutia-test-80dc07bf7bbf3ee9f7191a0446199d74cbb2d341.zip
fix: align PLC compatibility read endpoints with plc.directory schema
-rw-r--r--cmd/plccompat/main.go879
-rw-r--r--internal/api/plc_compatibility_test.go122
-rw-r--r--internal/api/server.go291
-rw-r--r--internal/ingest/service.go257
4 files changed, 1486 insertions, 63 deletions
diff --git a/cmd/plccompat/main.go b/cmd/plccompat/main.go
new file mode 100644
index 0000000..133a766
--- /dev/null
+++ b/cmd/plccompat/main.go
@@ -0,0 +1,879 @@
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "math/rand"
+ "net/http"
+ "net/url"
+ "os"
+ "sort"
+ "strings"
+ "time"
+)
+
+// endpointResult records the outcome of comparing a single endpoint between
+// the local mirror and the upstream PLC directory.
+type endpointResult struct {
+ Endpoint string `json:"endpoint"` // request path that was compared
+ Context string `json:"context,omitempty"` // e.g. "did=<did>", "export", "missing_did"
+ Compatible bool `json:"compatible"` // false when any issue was recorded
+ StatusLocal int `json:"status_local"`
+ StatusUpstream int `json:"status_upstream"`
+ ContentTypeLocal string `json:"content_type_local"`
+ ContentTypeUp string `json:"content_type_upstream"`
+ Issues []string `json:"issues,omitempty"` // human-readable mismatch descriptions
+}
+
+// report is the top-level JSON document emitted to stdout after a
+// compatibility run.
+type report struct {
+ GeneratedAt string `json:"generated_at"` // RFC3339 UTC timestamp
+ LocalBase string `json:"local_base"`
+ UpstreamBase string `json:"upstream_base"`
+ SelectedDIDs []string `json:"selected_dids"` // DIDs sampled from the local export
+ TombstoneDID string `json:"tombstone_did,omitempty"` // first tombstoned DID seen, if any
+ LegacyCreateDID string `json:"legacy_create_did,omitempty"` // first legacy "create"-op DID seen, if any
+ Results []endpointResult `json:"results"`
+ AllCompatible bool `json:"all_compatible"` // AND of all per-endpoint results
+ CompatibilityNote string `json:"compatibility_note,omitempty"`
+}
+
+// fetchResp is the minimal slice of an HTTP response that the comparison
+// logic needs: status, content type, and (size-limited) body bytes.
+type fetchResp struct {
+ status int
+ contentType string
+ body []byte
+}
+
+// main orchestrates a compatibility run: sample DIDs from the local export,
+// compare every per-DID read endpoint against upstream, check the /export
+// NDJSON shape and the 404/410 error shapes, print a JSON report to stdout,
+// and exit 1 if anything is incompatible (2 on bad flags).
+func main() {
+ var (
+ localBase = flag.String("local", "http://127.0.0.1:8080", "local Plutia base URL")
+ upstreamBase = flag.String("upstream", "https://plc.directory", "upstream PLC directory base URL")
+ count = flag.Int("count", 10, "export count for matrix export checks")
+ sampleN = flag.Int("sample", 5, "random DID samples from local export")
+ seed = flag.Int64("seed", time.Now().UnixNano(), "random seed")
+ )
+ flag.Parse()
+
+ if *count < 1 {
+ fmt.Fprintln(os.Stderr, "count must be >= 1")
+ os.Exit(2)
+ }
+ if *sampleN < 1 {
+ fmt.Fprintln(os.Stderr, "sample must be >= 1")
+ os.Exit(2)
+ }
+
+ client := &http.Client{Timeout: 30 * time.Second}
+
+ // Scan up to 2500 export rows locally to find sample DIDs plus, when
+ // present, one tombstoned DID and one legacy "create"-operation DID.
+ dids, tombstone, legacy, err := discoverDIDs(client, strings.TrimRight(*localBase, "/"), 2500, *sampleN, *seed)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "discover dids: %v\n", err)
+ os.Exit(1)
+ }
+
+ rep := report{
+ GeneratedAt: time.Now().UTC().Format(time.RFC3339),
+ LocalBase: strings.TrimRight(*localBase, "/"),
+ UpstreamBase: strings.TrimRight(*upstreamBase, "/"),
+ SelectedDIDs: dids,
+ TombstoneDID: tombstone,
+ LegacyCreateDID: legacy,
+ AllCompatible: true,
+ }
+
+ // Full read-endpoint matrix for each sampled DID.
+ for _, did := range dids {
+ for _, suffix := range []string{"", "/log", "/log/last", "/log/audit", "/data"} {
+ path := "/" + did + suffix
+ res := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, path, "did="+did)
+ rep.Results = append(rep.Results, res)
+ if !res.Compatible {
+ rep.AllCompatible = false
+ }
+ }
+ }
+
+ // Export schema + shape comparison.
+ exportPath := fmt.Sprintf("/export?count=%d", *count)
+ exportRes := compareNDJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, exportPath)
+ rep.Results = append(rep.Results, exportRes)
+ if !exportRes.Compatible {
+ rep.AllCompatible = false
+ }
+
+ // 404 shape comparison.
+ missingDID := "did:plc:this-does-not-exist-for-compat-check"
+ notFoundRes := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, "/"+missingDID, "missing_did")
+ rep.Results = append(rep.Results, notFoundRes)
+ if !notFoundRes.Compatible {
+ rep.AllCompatible = false
+ }
+
+ // 410 shape comparison where available.
+ if tombstone != "" {
+ goneRes := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, "/"+tombstone, "tombstone_did")
+ rep.Results = append(rep.Results, goneRes)
+ if !goneRes.Compatible {
+ rep.AllCompatible = false
+ }
+ }
+
+ if !rep.AllCompatible {
+ rep.CompatibilityNote = "incompatibilities detected; inspect issues per endpoint"
+ }
+
+ enc := json.NewEncoder(os.Stdout)
+ enc.SetIndent("", " ")
+ // Encode error deliberately ignored: the exit code below is the signal.
+ _ = enc.Encode(rep)
+
+ if !rep.AllCompatible {
+ os.Exit(1)
+ }
+}
+
+// discoverDIDs pulls up to count rows from the local /export feed and returns:
+// a random sample of sampleN unique DIDs (always including the first
+// tombstoned DID and the first legacy-create DID when found), plus those two
+// special DIDs separately. The seed makes the sample reproducible.
+func discoverDIDs(client *http.Client, base string, count int, sampleN int, seed int64) ([]string, string, string, error) {
+ path := fmt.Sprintf("%s/export?count=%d", base, count)
+ res, err := doFetch(client, path)
+ if err != nil {
+ return nil, "", "", err
+ }
+ if res.status != http.StatusOK {
+ return nil, "", "", fmt.Errorf("local export status=%d", res.status)
+ }
+
+ // Each export line carries at least a DID and the raw operation object.
+ type rec struct {
+ DID string `json:"did"`
+ Operation map[string]interface{} `json:"operation"`
+ }
+
+ r := bufio.NewScanner(bytes.NewReader(res.body))
+ // Allow export lines up to 10 MiB (default scanner limit is 64 KiB).
+ r.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)
+ unique := map[string]struct{}{}
+ ordered := make([]string, 0, count)
+ tombstone := ""
+ legacy := ""
+
+ for r.Scan() {
+ line := bytes.TrimSpace(r.Bytes())
+ if len(line) == 0 {
+ continue
+ }
+ var row rec
+ // Malformed or DID-less lines are skipped, not fatal: discovery is
+ // best-effort over whatever the export currently serves.
+ if err := json.Unmarshal(line, &row); err != nil {
+ continue
+ }
+ if row.DID == "" {
+ continue
+ }
+ if _, ok := unique[row.DID]; !ok {
+ unique[row.DID] = struct{}{}
+ ordered = append(ordered, row.DID)
+ }
+ typ, _ := row.Operation["type"].(string)
+ switch typ {
+ case "plc_tombstone", "tombstone":
+ if tombstone == "" {
+ tombstone = row.DID
+ }
+ case "create":
+ if legacy == "" {
+ legacy = row.DID
+ }
+ }
+ }
+ if err := r.Err(); err != nil {
+ return nil, "", "", err
+ }
+
+ if len(ordered) == 0 {
+ return nil, "", "", errors.New("no dids discovered from local export")
+ }
+
+ if sampleN > len(ordered) {
+ sampleN = len(ordered)
+ }
+
+ // Seeded permutation -> reproducible random sample.
+ rng := rand.New(rand.NewSource(seed))
+ perm := rng.Perm(len(ordered))
+ out := make([]string, 0, sampleN+2)
+ for i := 0; i < sampleN; i++ {
+ out = append(out, ordered[perm[i]])
+ }
+ // Guarantee the special DIDs are exercised even if not randomly drawn.
+ if tombstone != "" && !contains(out, tombstone) {
+ out = append(out, tombstone)
+ }
+ if legacy != "" && !contains(out, legacy) {
+ out = append(out, legacy)
+ }
+
+ return out, tombstone, legacy, nil
+}
+
+// compareJSONEndpoint fetches the same path from the local mirror and the
+// upstream directory and records status, content-type, and JSON-shape
+// differences for one endpoint. ctx is a free-form label for the report.
+func compareJSONEndpoint(client *http.Client, localBase, upstreamBase, path, ctx string) endpointResult {
+  out := endpointResult{Endpoint: path, Context: ctx, Compatible: true}
+
+  local, localErr := doFetch(client, localBase+path)
+  upstream, upstreamErr := doFetch(client, upstreamBase+path)
+  if localErr != nil || upstreamErr != nil {
+    out.Compatible = false
+    if localErr != nil {
+      out.Issues = append(out.Issues, "local fetch error: "+localErr.Error())
+    }
+    if upstreamErr != nil {
+      out.Issues = append(out.Issues, "upstream fetch error: "+upstreamErr.Error())
+    }
+    return out
+  }
+
+  out.StatusLocal, out.StatusUpstream = local.status, upstream.status
+  out.ContentTypeLocal, out.ContentTypeUp = local.contentType, upstream.contentType
+
+  if local.status != upstream.status {
+    out.Compatible = false
+    out.Issues = append(out.Issues, fmt.Sprintf("status mismatch local=%d upstream=%d", local.status, upstream.status))
+  }
+
+  localCT := normalizeContentType(local.contentType)
+  upCT := normalizeContentType(upstream.contentType)
+  if localCT != upCT {
+    out.Compatible = false
+    out.Issues = append(out.Issues, fmt.Sprintf("content-type mismatch local=%q upstream=%q", localCT, upCT))
+  }
+
+  // Shape checks only make sense when both sides actually returned JSON.
+  if isJSONish(localCT) && isJSONish(upCT) {
+    kind := endpointKind(path)
+    shapeIssues := validateEndpointJSONShape(kind, local.status, local.body, "local")
+    shapeIssues = append(shapeIssues, validateEndpointJSONShape(kind, upstream.status, upstream.body, "upstream")...)
+
+    // Single-document bodies are additionally diffed field-by-field;
+    // log arrays are not, since the two directories may hold different windows.
+    if kind == "did" || kind == "data" || kind == "error" {
+      localMismatch, upMismatch, strict := compareJSONBodies(local.body, upstream.body)
+      if localMismatch || upMismatch || len(strict) > 0 {
+        shapeIssues = append(shapeIssues, strict...)
+      }
+    }
+
+    if len(shapeIssues) > 0 {
+      out.Compatible = false
+      out.Issues = append(out.Issues, shapeIssues...)
+    }
+  }
+
+  return out
+}
+
+// compareNDJSONEndpoint compares the /export NDJSON feed between local and
+// upstream: HTTP status, content type, and the per-line export record shape
+// on both sides. Rows are validated independently, not cross-compared, since
+// the two directories serve different windows of the operation log.
+func compareNDJSONEndpoint(client *http.Client, localBase, upstreamBase, path string) endpointResult {
+  out := endpointResult{Endpoint: path, Context: "export", Compatible: true}
+
+  local, localErr := doFetch(client, localBase+path)
+  upstream, upstreamErr := doFetch(client, upstreamBase+path)
+  if localErr != nil || upstreamErr != nil {
+    out.Compatible = false
+    if localErr != nil {
+      out.Issues = append(out.Issues, "local fetch error: "+localErr.Error())
+    }
+    if upstreamErr != nil {
+      out.Issues = append(out.Issues, "upstream fetch error: "+upstreamErr.Error())
+    }
+    return out
+  }
+
+  out.StatusLocal, out.StatusUpstream = local.status, upstream.status
+  out.ContentTypeLocal, out.ContentTypeUp = local.contentType, upstream.contentType
+
+  if local.status != upstream.status {
+    out.Compatible = false
+    out.Issues = append(out.Issues, fmt.Sprintf("status mismatch local=%d upstream=%d", local.status, upstream.status))
+  }
+
+  localCT := normalizeContentType(local.contentType)
+  upCT := normalizeContentType(upstream.contentType)
+  if localCT != upCT {
+    out.Compatible = false
+    out.Issues = append(out.Issues, fmt.Sprintf("content-type mismatch local=%q upstream=%q", localCT, upCT))
+  }
+
+  localRows, localParseErr := parseNDJSON(local.body)
+  upRows, upParseErr := parseNDJSON(upstream.body)
+  if localParseErr != nil || upParseErr != nil {
+    out.Compatible = false
+    if localParseErr != nil {
+      out.Issues = append(out.Issues, "local ndjson parse error: "+localParseErr.Error())
+    }
+    if upParseErr != nil {
+      out.Issues = append(out.Issues, "upstream ndjson parse error: "+upParseErr.Error())
+    }
+    return out
+  }
+
+  // Export lines are numbered from 1 in issue messages.
+  for i, row := range localRows {
+    for _, issue := range validateExportShape(row) {
+      out.Issues = append(out.Issues, fmt.Sprintf("local line %d: %s", i+1, issue))
+    }
+  }
+  for i, row := range upRows {
+    for _, issue := range validateExportShape(row) {
+      out.Issues = append(out.Issues, fmt.Sprintf("upstream line %d: %s", i+1, issue))
+    }
+  }
+
+  if len(out.Issues) > 0 {
+    out.Compatible = false
+  }
+
+  return out
+}
+
+// endpointKind classifies a request path into the PLC endpoint family used to
+// pick the right JSON shape validator.
+func endpointKind(path string) string {
+  // Suffix rules run most-specific first so "/log/audit" and "/log/last"
+  // are not misclassified as plain "/log".
+  rules := []struct{ suffix, kind string }{
+    {"/log/audit", "log_audit"},
+    {"/log/last", "log_last"},
+    {"/log", "log"},
+    {"/data", "data"},
+  }
+  for _, rule := range rules {
+    if strings.HasSuffix(path, rule.suffix) {
+      return rule.kind
+    }
+  }
+  if strings.HasPrefix(path, "/did:") {
+    return "did"
+  }
+  return "error"
+}
+
+// validateEndpointJSONShape decodes body and validates it against the shape
+// expected for the given endpoint kind at the given status. side ("local" or
+// "upstream") prefixes every returned issue.
+func validateEndpointJSONShape(kind string, status int, body []byte, side string) []string {
+  decoded, err := decodeJSON(body)
+  if err != nil {
+    return []string{fmt.Sprintf("%s json decode error: %v", side, err)}
+  }
+
+  // checkOKOr404 validates the happy-path shape on 200 and expects a
+  // PLC-style error body on 404; any other status is accepted as-is.
+  checkOKOr404 := func(check func(interface{}) []string) []string {
+    if status == http.StatusOK {
+      return prefixIssues(side, check(decoded))
+    }
+    if status == http.StatusNotFound {
+      return prefixIssues(side, validateMessageErrorShape(decoded))
+    }
+    return nil
+  }
+
+  switch kind {
+  case "did":
+    switch status {
+    case http.StatusOK:
+      return prefixIssues(side, validateDIDDocumentShape(decoded))
+    case http.StatusGone:
+      // Tombstoned DIDs may surface either a DID document or a PLC-style
+      // error body; accept whichever one validates.
+      docIssues := validateDIDDocumentShape(decoded)
+      if len(docIssues) == 0 {
+        return nil
+      }
+      if len(validateMessageErrorShape(decoded)) == 0 {
+        return nil
+      }
+      return prefixIssues(side, docIssues)
+    case http.StatusNotFound:
+      return prefixIssues(side, validateMessageErrorShape(decoded))
+    }
+    return nil
+  case "data":
+    return checkOKOr404(validatePLCDataShape)
+  case "log":
+    return checkOKOr404(validateOperationArray)
+  case "log_last":
+    return checkOKOr404(validateOperationShape)
+  case "log_audit":
+    return checkOKOr404(validateAuditArrayShape)
+  }
+
+  // Unknown kinds ("error" paths): only 404/405 bodies are shape-checked.
+  if status == http.StatusNotFound || status == http.StatusMethodNotAllowed {
+    return prefixIssues(side, validateMessageErrorShape(decoded))
+  }
+  return nil
+}
+
+// decodeJSON decodes body into a generic value, keeping numbers as
+// json.Number so integer precision survives the round trip.
+func decodeJSON(body []byte) (interface{}, error) {
+  decoder := json.NewDecoder(bytes.NewReader(body))
+  decoder.UseNumber()
+
+  var value interface{}
+  if err := decoder.Decode(&value); err != nil {
+    return nil, err
+  }
+  return value, nil
+}
+
+// prefixIssues prepends "side " to every issue; an empty input yields nil so
+// callers can treat "no issues" uniformly.
+func prefixIssues(side string, issues []string) []string {
+  if len(issues) == 0 {
+    return nil
+  }
+
+  prefixed := make([]string, len(issues))
+  for i, issue := range issues {
+    prefixed[i] = fmt.Sprintf("%s %s", side, issue)
+  }
+  return prefixed
+}
+
+// validateMessageErrorShape checks that an error body follows plc.directory's
+// convention: a JSON object with a non-empty "message" string and no internal
+// "error" field leaking implementation detail.
+func validateMessageErrorShape(v interface{}) []string {
+  body, isObject := v.(map[string]interface{})
+  if !isObject {
+    return []string{"expected object error body"}
+  }
+
+  message, isString := body["message"].(string)
+  if !isString || strings.TrimSpace(message) == "" {
+    return []string{"missing message string field"}
+  }
+
+  if _, leaked := body["error"]; leaked {
+    return []string{"unexpected error field"}
+  }
+
+  return nil
+}
+
+// validateDIDDocumentShape checks a DID document body: @context array and id
+// string are required, the four optional fields must have the right JSON type
+// when present, and any unknown field is flagged. Issues come back sorted.
+func validateDIDDocumentShape(v interface{}) []string {
+  doc, isObject := v.(map[string]interface{})
+  if !isObject {
+    return []string{"expected object did document"}
+  }
+
+  issues := make([]string, 0)
+
+  if _, ok := doc["@context"].([]interface{}); !ok {
+    issues = append(issues, "missing @context array")
+  }
+  if _, ok := doc["id"].(string); !ok {
+    issues = append(issues, "missing id string")
+  }
+
+  // Optional fields: type-checked only when present.
+  optional := map[string]string{
+    "alsoKnownAs":        "array",
+    "verificationMethod": "array",
+    "service":            "array",
+    "deactivated":        "bool",
+  }
+  for field, want := range optional {
+    if val, present := doc[field]; present && typeName(val) != want {
+      issues = append(issues, field+" must be "+want)
+    }
+  }
+
+  for field := range doc {
+    switch field {
+    case "@context", "id", "alsoKnownAs", "verificationMethod", "service", "deactivated":
+      // Known field.
+    default:
+      issues = append(issues, "extra field "+field)
+    }
+  }
+
+  sort.Strings(issues)
+
+  return issues
+}
+
+// validatePLCDataShape checks a /:did/data body against plc.directory's exact
+// field set (did, verificationMethods, rotationKeys, alsoKnownAs, services);
+// missing fields, wrong types, and extra fields are all flagged, sorted.
+func validatePLCDataShape(v interface{}) []string {
+  data, isObject := v.(map[string]interface{})
+  if !isObject {
+    return []string{"expected object plc data"}
+  }
+
+  required := map[string]string{
+    "did":                 "string",
+    "verificationMethods": "object",
+    "rotationKeys":        "array",
+    "alsoKnownAs":         "array",
+    "services":            "object",
+  }
+
+  issues := make([]string, 0)
+  for field, want := range required {
+    val, present := data[field]
+    switch {
+    case !present:
+      issues = append(issues, "missing field "+field)
+    case typeName(val) != want:
+      issues = append(issues, fmt.Sprintf("field %s type=%s want=%s", field, typeName(val), want))
+    }
+  }
+
+  for field := range data {
+    if _, known := required[field]; !known {
+      issues = append(issues, "extra field "+field)
+    }
+  }
+
+  sort.Strings(issues)
+
+  return issues
+}
+
+// validateOperationArray checks a /:did/log body: a JSON array where every
+// element is a valid PLC operation. Issues are tagged with their row index.
+func validateOperationArray(v interface{}) []string {
+  operations, isArray := v.([]interface{})
+  if !isArray {
+    return []string{"expected array of operations"}
+  }
+
+  issues := make([]string, 0)
+  for i, op := range operations {
+    for _, issue := range validateOperationShape(op) {
+      issues = append(issues, fmt.Sprintf("row %d: %s", i, issue))
+    }
+  }
+
+  return issues
+}
+
+// validateAuditArrayShape checks a /:did/log/audit body: an array of audit
+// entries each carrying did/operation/cid/nullified/createdAt with the right
+// JSON types, plus a structurally valid inner operation. Issues are tagged
+// with their row index.
+func validateAuditArrayShape(v interface{}) []string {
+  rows, ok := v.([]interface{})
+  if !ok {
+    return []string{"expected array of audit entries"}
+  }
+
+  // Fixed field order (hoisted out of the loop) instead of a per-row map:
+  // the map version re-allocated every iteration and, worse, emitted issues
+  // in random map-iteration order, making the report nondeterministic across
+  // runs — unlike every other validator here, which sorts or fixes order.
+  required := []struct{ name, want string }{
+    {"did", "string"},
+    {"operation", "object"},
+    {"cid", "string"},
+    {"nullified", "bool"},
+    {"createdAt", "string"},
+  }
+
+  issues := make([]string, 0)
+  for idx, row := range rows {
+    obj, ok := row.(map[string]interface{})
+    if !ok {
+      issues = append(issues, fmt.Sprintf("row %d: expected object", idx))
+
+      continue
+    }
+
+    for _, field := range required {
+      val, present := obj[field.name]
+      if !present {
+        issues = append(issues, fmt.Sprintf("row %d: missing field %s", idx, field.name))
+
+        continue
+      }
+
+      if got := typeName(val); got != field.want {
+        issues = append(issues, fmt.Sprintf("row %d: field %s type=%s want=%s", idx, field.name, got, field.want))
+      }
+    }
+
+    if op, ok := obj["operation"]; ok {
+      for _, issue := range validateOperationShape(op) {
+        issues = append(issues, fmt.Sprintf("row %d operation: %s", idx, issue))
+      }
+    }
+  }
+
+  return issues
+}
+
+// validateOperationShape checks a single PLC operation object. Every type
+// needs a non-empty "type", a "sig" string, and a "prev" field (which may be
+// null); legacy "create" and modern "plc_operation" each add required fields,
+// while tombstones require nothing further. Issues come back sorted.
+func validateOperationShape(v interface{}) []string {
+  op, isObject := v.(map[string]interface{})
+  if !isObject {
+    return []string{"expected operation object"}
+  }
+
+  opType, _ := op["type"].(string)
+  if strings.TrimSpace(opType) == "" {
+    return []string{"missing type string"}
+  }
+
+  issues := make([]string, 0)
+
+  if typeName(op["sig"]) != "string" {
+    issues = append(issues, "missing sig string")
+  }
+  if _, present := op["prev"]; !present {
+    issues = append(issues, "missing prev field")
+  }
+
+  switch opType {
+  case "create":
+    // Legacy genesis operations use flat string fields.
+    for _, field := range []string{"handle", "service", "signingKey", "recoveryKey"} {
+      if typeName(op[field]) != "string" {
+        issues = append(issues, "missing "+field+" string")
+      }
+    }
+  case "plc_operation":
+    structured := map[string]string{
+      "services":            "object",
+      "alsoKnownAs":         "array",
+      "rotationKeys":        "array",
+      "verificationMethods": "object",
+    }
+    for field, want := range structured {
+      if typeName(op[field]) != want {
+        issues = append(issues, "missing "+field+" "+want)
+      }
+    }
+  case "plc_tombstone", "tombstone":
+    // No additional requirements.
+  }
+
+  sort.Strings(issues)
+
+  return issues
+}
+
+func doFetch(client *http.Client, rawURL string) (fetchResp, error) {
+ u, err := url.Parse(rawURL)
+ if err != nil {
+ return fetchResp{}, err
+ }
+ req, err := http.NewRequest(http.MethodGet, u.String(), nil)
+ if err != nil {
+ return fetchResp{}, err
+ }
+ resp, err := client.Do(req)
+ if err != nil {
+ return fetchResp{}, err
+ }
+ defer resp.Body.Close()
+
+ b, err := io.ReadAll(io.LimitReader(resp.Body, 10*1024*1024))
+ if err != nil {
+ return fetchResp{}, err
+ }
+
+ return fetchResp{
+ status: resp.StatusCode,
+ contentType: resp.Header.Get("Content-Type"),
+ body: b,
+ }, nil
+}
+
+// parseNDJSON decodes newline-delimited JSON into a slice of generic values,
+// skipping blank lines and preserving numbers as json.Number. Any malformed
+// line aborts the whole parse.
+func parseNDJSON(body []byte) ([]interface{}, error) {
+  rows := make([]interface{}, 0)
+
+  scanner := bufio.NewScanner(bytes.NewReader(body))
+  // Allow lines up to 10 MiB (default scanner limit is 64 KiB).
+  scanner.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)
+
+  for scanner.Scan() {
+    line := bytes.TrimSpace(scanner.Bytes())
+    if len(line) == 0 {
+      continue
+    }
+
+    decoder := json.NewDecoder(bytes.NewReader(line))
+    decoder.UseNumber()
+
+    var row interface{}
+    if err := decoder.Decode(&row); err != nil {
+      return nil, err
+    }
+    rows = append(rows, row)
+  }
+
+  if err := scanner.Err(); err != nil {
+    return nil, err
+  }
+
+  return rows, nil
+}
+
+// validateExportShape checks one /export NDJSON line: an object with exactly
+// did/operation/cid/nullified/createdAt of the right JSON types. Missing,
+// mistyped, and extra fields are all flagged; issues come back sorted.
+func validateExportShape(v interface{}) []string {
+  row, isObject := v.(map[string]interface{})
+  if !isObject {
+    return []string{"expected object line"}
+  }
+
+  required := map[string]string{
+    "did":       "string",
+    "operation": "object",
+    "cid":       "string",
+    "nullified": "bool",
+    "createdAt": "string",
+  }
+
+  issues := make([]string, 0)
+  for field, want := range required {
+    val, present := row[field]
+    switch {
+    case !present:
+      issues = append(issues, "missing field "+field)
+    case typeName(val) != want:
+      issues = append(issues, fmt.Sprintf("field %s type=%s want=%s", field, typeName(val), want))
+    }
+  }
+
+  for field := range row {
+    if _, known := required[field]; !known {
+      issues = append(issues, "extra field "+field)
+    }
+  }
+
+  sort.Strings(issues)
+
+  return issues
+}
+
+// compareJSONBodies decodes both bodies (numbers as json.Number) and diffs
+// them structurally from the root "$". A decode failure on either side is
+// reported as a single issue with no mismatch flags.
+func compareJSONBodies(localBody, upBody []byte) (bool, bool, []string) {
+  decode := func(data []byte, side string) (interface{}, []string) {
+    decoder := json.NewDecoder(bytes.NewReader(data))
+    decoder.UseNumber()
+
+    var value interface{}
+    if err := decoder.Decode(&value); err != nil {
+      return nil, []string{side + " json decode error: " + err.Error()}
+    }
+    return value, nil
+  }
+
+  local, issues := decode(localBody, "local")
+  if issues != nil {
+    return false, false, issues
+  }
+
+  upstream, issues := decode(upBody, "upstream")
+  if issues != nil {
+    return false, false, issues
+  }
+
+  return diffAny(local, upstream, "$")
+}
+
+// diffAny structurally compares two decoded JSON values rooted at path.
+// hasLocalMismatch means local lacks something upstream has; hasUpMismatch
+// means local carries something upstream lacks. Scalars are intentionally not
+// value-compared — only structure (types, fields, lengths) matters here.
+func diffAny(local, upstream interface{}, path string) (hasLocalMismatch bool, hasUpMismatch bool, issues []string) {
+  lt, ut := typeName(local), typeName(upstream)
+  if lt != ut {
+    return true, true, []string{fmt.Sprintf("%s type mismatch local=%s upstream=%s", path, lt, ut)}
+  }
+
+  switch l := local.(type) {
+  case map[string]interface{}:
+    u := upstream.(map[string]interface{})
+    for k := range u {
+      if _, ok := l[k]; !ok {
+        hasLocalMismatch = true
+        issues = append(issues, fmt.Sprintf("%s missing field in local: %s", path, k))
+      }
+    }
+    for k := range l {
+      if _, ok := u[k]; !ok {
+        hasUpMismatch = true
+        issues = append(issues, fmt.Sprintf("%s extra field in local: %s", path, k))
+      }
+    }
+    for k := range l {
+      uv, ok := u[k]
+      if !ok {
+        continue
+      }
+      lm, um, sub := diffAny(l[k], uv, path+"."+k)
+      if lm {
+        hasLocalMismatch = true
+      }
+      if um {
+        hasUpMismatch = true
+      }
+      issues = append(issues, sub...)
+    }
+  case []interface{}:
+    u := upstream.([]interface{})
+    // BUG FIX: the original compared only the common prefix, so arrays of
+    // different lengths were silently treated as equal — a real gap for a
+    // compatibility checker. Report the length difference explicitly.
+    if len(l) != len(u) {
+      if len(l) < len(u) {
+        hasLocalMismatch = true
+      } else {
+        hasUpMismatch = true
+      }
+      issues = append(issues, fmt.Sprintf("%s length mismatch local=%d upstream=%d", path, len(l), len(u)))
+    }
+    n := min(len(l), len(u))
+    for i := 0; i < n; i++ {
+      lm, um, sub := diffAny(l[i], u[i], fmt.Sprintf("%s[%d]", path, i))
+      if lm {
+        hasLocalMismatch = true
+      }
+      if um {
+        hasUpMismatch = true
+      }
+      issues = append(issues, sub...)
+    }
+  }
+
+  return hasLocalMismatch, hasUpMismatch, issues
+}
+
+// typeName maps a decoded JSON value onto its schema type name ("null",
+// "bool", "string", "number", "array", "object"); any other Go value falls
+// through to its %T representation.
+func typeName(v interface{}) string {
+  if v == nil {
+    return "null"
+  }
+
+  switch v.(type) {
+  case bool:
+    return "bool"
+  case string:
+    return "string"
+  case json.Number, float64, float32, int, int64, int32, uint, uint64:
+    return "number"
+  case []interface{}:
+    return "array"
+  case map[string]interface{}:
+    return "object"
+  }
+
+  return fmt.Sprintf("%T", v)
+}
+
+// normalizeContentType lowercases the media type and strips any parameters
+// (e.g. "; charset=utf-8") so content types compare structurally.
+func normalizeContentType(v string) string {
+  v = strings.TrimSpace(v)
+  if v == "" {
+    return ""
+  }
+  if i := strings.Index(v, ";"); i >= 0 {
+    v = v[:i]
+  }
+  return strings.ToLower(strings.TrimSpace(v))
+}
+
+// isJSONish reports whether a normalized content type carries a JSON body the
+// shape validators understand.
+func isJSONish(ct string) bool {
+  switch ct {
+  case "application/json", "application/did+ld+json":
+    return true
+  }
+  return false
+}
+
+// contains reports whether target appears in xs.
+func contains(xs []string, target string) bool {
+  for i := range xs {
+    if xs[i] == target {
+      return true
+    }
+  }
+  return false
+}
+
+// min returns the smaller of a and b.
+func min(a, b int) int {
+  if b < a {
+    return b
+  }
+  return a
+}
diff --git a/internal/api/plc_compatibility_test.go b/internal/api/plc_compatibility_test.go
index fe7ee82..45428d8 100644
--- a/internal/api/plc_compatibility_test.go
+++ b/internal/api/plc_compatibility_test.go
@@ -21,25 +21,55 @@ import (
"time"
)
-func TestPLCCompatibilityGetDIDMatchesStoredDocument(t *testing.T) {
- ts, store, _, cleanup := newCompatibilityServer(t)
+func TestPLCCompatibilityGetDIDShape(t *testing.T) {
+ ts, _, _, cleanup := newCompatibilityServer(t)
defer cleanup()
- state, ok, err := store.GetState("did:plc:alice")
+ resp, err := http.Get(ts.URL + "/did:plc:alice")
if err != nil {
- t.Fatalf("get state: %v", err)
+ t.Fatalf("get did: %v", err)
}
- if !ok {
- t.Fatalf("state not found")
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ t.Fatalf("status: got %d want 200", resp.StatusCode)
}
- resp, err := http.Get(ts.URL + "/did:plc:alice")
+ if got := resp.Header.Get("Content-Type"); !strings.Contains(got, "application/did+ld+json") {
+ t.Fatalf("content-type mismatch: %s", got)
+ }
+
+ var body map[string]any
+
+ if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
+ t.Fatalf("decode body: %v", err)
+ }
+
+ if _, ok := body["@context"].([]any); !ok {
+ t.Fatalf("missing or invalid @context: %#v", body["@context"])
+ }
+
+ if got, _ := body["id"].(string); got != "did:plc:alice" {
+ t.Fatalf("id mismatch: got %q want %q", got, "did:plc:alice")
+ }
+
+ if _, ok := body["authentication"]; ok {
+ t.Fatalf("unexpected authentication field in compatibility did document")
+ }
+}
+
+func TestPLCCompatibilityGetDataShape(t *testing.T) {
+ ts, _, _, cleanup := newCompatibilityServer(t)
+
+ defer cleanup()
+
+ resp, err := http.Get(ts.URL + "/did:plc:alice/data")
if err != nil {
- t.Fatalf("get did: %v", err)
+ t.Fatalf("get data: %v", err)
}
defer resp.Body.Close()
@@ -48,14 +78,34 @@ func TestPLCCompatibilityGetDIDMatchesStoredDocument(t *testing.T) {
t.Fatalf("status: got %d want 200", resp.StatusCode)
}
- if got := resp.Header.Get("Content-Type"); !strings.Contains(got, "application/did+ld+json") {
- t.Fatalf("content-type mismatch: %s", got)
+ var body map[string]any
+
+ if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
+ t.Fatalf("decode body: %v", err)
}
- body, _ := io.ReadAll(resp.Body)
+ required := []string{"did", "verificationMethods", "rotationKeys", "alsoKnownAs", "services"}
+
+ for _, key := range required {
+ if _, ok := body[key]; !ok {
+ t.Fatalf("missing key %q in /data response", key)
+ }
+ }
- if strings.TrimSpace(string(body)) != strings.TrimSpace(string(state.DIDDocument)) {
- t.Fatalf("did document mismatch\n got: %s\nwant: %s", string(body), string(state.DIDDocument))
+ if _, ok := body["verificationMethods"].(map[string]any); !ok {
+ t.Fatalf("verificationMethods has wrong type: %#v", body["verificationMethods"])
+ }
+
+ if _, ok := body["rotationKeys"].([]any); !ok {
+ t.Fatalf("rotationKeys has wrong type: %#v", body["rotationKeys"])
+ }
+
+ if _, ok := body["alsoKnownAs"].([]any); !ok {
+ t.Fatalf("alsoKnownAs has wrong type: %#v", body["alsoKnownAs"])
+ }
+
+ if _, ok := body["services"].(map[string]any); !ok {
+ t.Fatalf("services has wrong type: %#v", body["services"])
}
}
@@ -164,6 +214,20 @@ func TestPLCCompatibilityPostIsMethodNotAllowed(t *testing.T) {
if allow := resp.Header.Get("Allow"); allow != http.MethodGet {
t.Fatalf("allow header mismatch: got %q want %q", allow, http.MethodGet)
}
+
+ var body map[string]any
+
+ if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
+ t.Fatalf("decode body: %v", err)
+ }
+
+ if _, ok := body["message"].(string); !ok {
+ t.Fatalf("expected PLC-style message field, got: %v", body)
+ }
+
+ if _, ok := body["error"]; ok {
+ t.Fatalf("unexpected internal error field in compatibility response: %v", body)
+ }
}
func TestPLCCompatibilityNoVerificationMetadataLeak(t *testing.T) {
@@ -206,6 +270,38 @@ func TestPLCCompatibilityProofEndpointStillWorks(t *testing.T) {
}
}
+// TestPLCCompatibilityNotFoundUsesPLCErrorShape verifies that a lookup for an
+// unregistered DID returns 404 with a plc.directory-style body: a "message"
+// string and no internal "error" field.
+func TestPLCCompatibilityNotFoundUsesPLCErrorShape(t *testing.T) {
+ ts, _, _, cleanup := newCompatibilityServer(t)
+
+ defer cleanup()
+
+ resp, err := http.Get(ts.URL + "/did:plc:not-registered")
+
+ if err != nil {
+ t.Fatalf("get missing did: %v", err)
+ }
+
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusNotFound {
+ t.Fatalf("status: got %d want 404", resp.StatusCode)
+ }
+
+ var body map[string]any
+
+ if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
+ t.Fatalf("decode body: %v", err)
+ }
+
+ if _, ok := body["message"].(string); !ok {
+ t.Fatalf("expected message field for 404, got: %v", body)
+ }
+
+ if _, ok := body["error"]; ok {
+ t.Fatalf("unexpected error field in compatibility 404 body: %v", body)
+ }
+}
+
func newCompatibilityServer(t *testing.T) (*httptest.Server, *storage.PebbleStore, []types.ExportRecord, func()) {
t.Helper()
diff --git a/internal/api/server.go b/internal/api/server.go
index a3ef211..e9e1e15 100644
--- a/internal/api/server.go
+++ b/internal/api/server.go
@@ -14,6 +14,7 @@ import (
"github.com/Fuwn/plutia/internal/types"
"github.com/Fuwn/plutia/pkg/proof"
"net/http"
+ "sort"
"strconv"
"strings"
"time"
@@ -364,11 +365,39 @@ type plcAuditEntry struct {
CreatedAt string `json:"createdAt"`
}
+// plcDIDDocument is the exact wire shape plc.directory serves for GET /:did;
+// only these fields are emitted, with optional ones omitted when empty.
+type plcDIDDocument struct {
+ Context []string `json:"@context"`
+ ID string `json:"id"`
+ AlsoKnownAs []string `json:"alsoKnownAs,omitempty"`
+ VerificationMethod []plcVerificationEntry `json:"verificationMethod,omitempty"`
+ Service []plcServiceEntry `json:"service,omitempty"`
+ Deactivated bool `json:"deactivated,omitempty"` // true for tombstoned DIDs
+}
+
+// plcVerificationEntry is one verificationMethod element in the DID document.
+type plcVerificationEntry struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ Controller string `json:"controller"`
+ PublicKeyMultibase string `json:"publicKeyMultibase"`
+}
+
+// plcServiceEntry is one service element in the DID document.
+type plcServiceEntry struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ ServiceEndpoint string `json:"serviceEndpoint"`
+}
+
+// plcDIDContexts is the fixed @context list plc.directory emits on every DID
+// document, in this order.
+var plcDIDContexts = []string{
+ "https://www.w3.org/ns/did/v1",
+ "https://w3id.org/security/multikey/v1",
+ "https://w3id.org/security/suites/secp256k1-2019/v1",
+}
+
func (s *Server) handlePLCCompatibility(w http.ResponseWriter, r *http.Request) {
path := strings.TrimPrefix(r.URL.Path, "/")
if path == "" {
- writeErr(w, http.StatusNotFound, fmt.Errorf("not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "not found")
return
}
@@ -383,28 +412,28 @@ func (s *Server) handlePLCCompatibility(w http.ResponseWriter, r *http.Request)
did := parts[0]
if !strings.HasPrefix(did, "did:") {
- writeErr(w, http.StatusNotFound, fmt.Errorf("not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "not found")
return
}
if r.Method == http.MethodPost && len(parts) == 1 {
w.Header().Set("Allow", http.MethodGet)
- writeErr(w, http.StatusMethodNotAllowed, fmt.Errorf("write operations are not supported by this mirror"))
+ writeCompatibilityErr(w, http.StatusMethodNotAllowed, "write operations are not supported by this mirror")
return
}
if r.Method != http.MethodGet {
w.Header().Set("Allow", http.MethodGet)
- writeErr(w, http.StatusMethodNotAllowed, fmt.Errorf("method not allowed"))
+ writeCompatibilityErr(w, http.StatusMethodNotAllowed, "method not allowed")
return
}
switch {
case len(parts) == 1:
- s.handleGetDIDCompatibility(w, did)
+ s.handleGetDIDCompatibility(w, r, did)
case len(parts) == 2 && parts[1] == "data":
s.handleGetDIDDataCompatibility(w, r, did)
case len(parts) == 2 && parts[1] == "log":
@@ -414,40 +443,63 @@ func (s *Server) handlePLCCompatibility(w http.ResponseWriter, r *http.Request)
case len(parts) == 3 && parts[1] == "log" && parts[2] == "audit":
s.handleGetDIDLogAuditCompatibility(w, r, did)
default:
- writeErr(w, http.StatusNotFound, fmt.Errorf("not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "not found")
}
}
-func (s *Server) handleGetDIDCompatibility(w http.ResponseWriter, did string) {
+func (s *Server) handleGetDIDCompatibility(w http.ResponseWriter, r *http.Request, did string) {
state, ok, err := s.store.GetState(did)
if err != nil {
- writeErr(w, http.StatusInternalServerError, err)
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
if !ok {
- writeErr(w, http.StatusNotFound, fmt.Errorf("did not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
+
+ return
+ }
+
+ if s.ingestor == nil {
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
+
+ return
+ }
+
+ data, err := s.ingestor.LoadCurrentPLCData(r.Context(), did)
+ if err != nil {
+ if errors.Is(err, ingest.ErrDIDNotFound) {
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
+
+ return
+ }
+
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ writeCompatibilityErr(w, http.StatusGatewayTimeout, err.Error())
+
+ return
+ }
+
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
status := http.StatusOK
- if isTombstonedDIDDocument(state.DIDDocument) {
+ deactivated := isTombstonedDIDDocument(state.DIDDocument)
+ if deactivated {
status = http.StatusGone
}
- w.Header().Set("Content-Type", "application/did+ld+json")
- w.WriteHeader(status)
-
- _, _ = w.Write(state.DIDDocument)
+ writeJSONWithContentType(w, status, "application/did+ld+json", buildPLCDIDDocument(did, data, deactivated))
}
func (s *Server) handleGetDIDLogCompatibility(w http.ResponseWriter, r *http.Request, did string) {
if s.ingestor == nil {
- writeErr(w, http.StatusServiceUnavailable, fmt.Errorf("ingestor unavailable"))
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
return
}
@@ -456,18 +508,18 @@ func (s *Server) handleGetDIDLogCompatibility(w http.ResponseWriter, r *http.Req
if err != nil {
if errors.Is(err, ingest.ErrDIDNotFound) || errors.Is(err, ingest.ErrHistoryNotStored) {
- writeErr(w, http.StatusNotFound, fmt.Errorf("did not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
return
}
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
- writeErr(w, http.StatusGatewayTimeout, err)
+ writeCompatibilityErr(w, http.StatusGatewayTimeout, err.Error())
return
}
- writeErr(w, http.StatusInternalServerError, err)
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
@@ -483,7 +535,7 @@ func (s *Server) handleGetDIDLogCompatibility(w http.ResponseWriter, r *http.Req
func (s *Server) handleGetDIDLogLastCompatibility(w http.ResponseWriter, r *http.Request, did string) {
if s.ingestor == nil {
- writeErr(w, http.StatusServiceUnavailable, fmt.Errorf("ingestor unavailable"))
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
return
}
@@ -492,18 +544,18 @@ func (s *Server) handleGetDIDLogLastCompatibility(w http.ResponseWriter, r *http
if err != nil {
if errors.Is(err, ingest.ErrDIDNotFound) || errors.Is(err, ingest.ErrHistoryNotStored) {
- writeErr(w, http.StatusNotFound, fmt.Errorf("did not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
return
}
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
- writeErr(w, http.StatusGatewayTimeout, err)
+ writeCompatibilityErr(w, http.StatusGatewayTimeout, err.Error())
return
}
- writeErr(w, http.StatusInternalServerError, err)
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
@@ -516,7 +568,7 @@ func (s *Server) handleGetDIDLogLastCompatibility(w http.ResponseWriter, r *http
func (s *Server) handleGetDIDLogAuditCompatibility(w http.ResponseWriter, r *http.Request, did string) {
if s.ingestor == nil {
- writeErr(w, http.StatusServiceUnavailable, fmt.Errorf("ingestor unavailable"))
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
return
}
@@ -525,18 +577,18 @@ func (s *Server) handleGetDIDLogAuditCompatibility(w http.ResponseWriter, r *htt
if err != nil {
if errors.Is(err, ingest.ErrDIDNotFound) || errors.Is(err, ingest.ErrHistoryNotStored) {
- writeErr(w, http.StatusNotFound, fmt.Errorf("did not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
return
}
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
- writeErr(w, http.StatusGatewayTimeout, err)
+ writeCompatibilityErr(w, http.StatusGatewayTimeout, err.Error())
return
}
- writeErr(w, http.StatusInternalServerError, err)
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
@@ -558,7 +610,7 @@ func (s *Server) handleGetDIDLogAuditCompatibility(w http.ResponseWriter, r *htt
func (s *Server) handleGetDIDDataCompatibility(w http.ResponseWriter, r *http.Request, did string) {
if s.ingestor == nil {
- writeErr(w, http.StatusServiceUnavailable, fmt.Errorf("ingestor unavailable"))
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
return
}
@@ -567,18 +619,18 @@ func (s *Server) handleGetDIDDataCompatibility(w http.ResponseWriter, r *http.Re
if err != nil {
if errors.Is(err, ingest.ErrDIDNotFound) {
- writeErr(w, http.StatusNotFound, fmt.Errorf("did not found"))
+ writeCompatibilityErr(w, http.StatusNotFound, "DID not registered: "+did)
return
}
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
- writeErr(w, http.StatusGatewayTimeout, err)
+ writeCompatibilityErr(w, http.StatusGatewayTimeout, err.Error())
return
}
- writeErr(w, http.StatusInternalServerError, err)
+ writeCompatibilityErr(w, http.StatusInternalServerError, err.Error())
return
}
@@ -589,13 +641,13 @@ func (s *Server) handleGetDIDDataCompatibility(w http.ResponseWriter, r *http.Re
func (s *Server) handleExportCompatibility(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
w.Header().Set("Allow", http.MethodGet)
- writeErr(w, http.StatusMethodNotAllowed, fmt.Errorf("method not allowed"))
+ writeCompatibilityErr(w, http.StatusMethodNotAllowed, "method not allowed")
return
}
if s.ingestor == nil {
- writeErr(w, http.StatusServiceUnavailable, fmt.Errorf("ingestor unavailable"))
+ writeCompatibilityErr(w, http.StatusServiceUnavailable, "ingestor unavailable")
return
}
@@ -606,7 +658,7 @@ func (s *Server) handleExportCompatibility(w http.ResponseWriter, r *http.Reques
n, err := strconv.Atoi(rawCount)
if err != nil || n < 1 {
- writeErr(w, http.StatusBadRequest, fmt.Errorf("invalid count query parameter"))
+ writeCompatibilityErr(w, http.StatusBadRequest, "invalid count query parameter")
return
}
@@ -624,7 +676,7 @@ func (s *Server) handleExportCompatibility(w http.ResponseWriter, r *http.Reques
parsed, err := time.Parse(time.RFC3339, rawAfter)
if err != nil {
- writeErr(w, http.StatusBadRequest, fmt.Errorf("invalid after query parameter"))
+ writeCompatibilityErr(w, http.StatusBadRequest, "invalid after query parameter")
return
}
@@ -679,6 +731,177 @@ func isTombstonedDIDDocument(raw []byte) bool {
return deactivated
}
+func buildPLCDIDDocument(did string, plcData map[string]any, deactivated bool) plcDIDDocument {
+ doc := plcDIDDocument{
+ Context: append([]string(nil), plcDIDContexts...),
+ ID: did,
+ AlsoKnownAs: extractStringArray(plcData["alsoKnownAs"]),
+ Deactivated: deactivated,
+ }
+
+ verificationMethods := extractVerificationMethodMap(plcData["verificationMethods"])
+ if len(verificationMethods) > 0 {
+ names := make([]string, 0, len(verificationMethods))
+ for name := range verificationMethods {
+ names = append(names, name)
+ }
+
+ sort.Strings(names)
+
+ doc.VerificationMethod = make([]plcVerificationEntry, 0, len(names))
+ for _, name := range names {
+ value := verificationMethods[name]
+ if strings.TrimSpace(value) == "" {
+ continue
+ }
+
+ doc.VerificationMethod = append(doc.VerificationMethod, plcVerificationEntry{
+ ID: did + "#" + name,
+ Type: "Multikey",
+ Controller: did,
+ PublicKeyMultibase: value,
+ })
+ }
+ }
+
+ services := extractServicesMap(plcData["services"])
+ if len(services) > 0 {
+ names := make([]string, 0, len(services))
+ for name := range services {
+ names = append(names, name)
+ }
+
+ sort.Strings(names)
+
+ doc.Service = make([]plcServiceEntry, 0, len(names))
+ for _, name := range names {
+ entry := services[name]
+ typ := entry["type"]
+ endpoint := entry["endpoint"]
+
+ if strings.TrimSpace(endpoint) == "" {
+ continue
+ }
+
+ doc.Service = append(doc.Service, plcServiceEntry{
+ ID: "#" + name,
+ Type: typ,
+ ServiceEndpoint: endpoint,
+ })
+ }
+ }
+
+ return doc
+}
+
// extractStringArray coerces a decoded JSON value into a slice of non-empty
// strings. Both []string and []any inputs are accepted; any other type yields
// nil. Entries are whitespace-trimmed and blank (or non-string) entries are
// dropped.
//
// Fix: the []any branch previously appended the *untrimmed* value even though
// it blank-checked the trimmed form, while the []string branch appended the
// trimmed value — the two branches now trim consistently.
func extractStringArray(v any) []string {
	// appendClean appends s to out only when it is non-blank after trimming.
	appendClean := func(out []string, s string) []string {
		if s = strings.TrimSpace(s); s != "" {
			out = append(out, s)
		}

		return out
	}

	switch raw := v.(type) {
	case []string:
		out := make([]string, 0, len(raw))
		for _, item := range raw {
			out = appendClean(out, item)
		}

		return out
	case []any:
		out := make([]string, 0, len(raw))
		for _, item := range raw {
			s, _ := item.(string)
			out = appendClean(out, s)
		}

		return out
	default:
		return nil
	}
}
+
// extractVerificationMethodMap coerces a decoded JSON value into a
// name → multibase-key map. Both map[string]string and map[string]any inputs
// are handled; keys that are blank (or non-string) are dropped, and kept keys
// are stored verbatim.
func extractVerificationMethodMap(v any) map[string]string {
	methods := map[string]string{}

	// keep records the pair unless the key is blank after trimming.
	keep := func(name, key string) {
		if strings.TrimSpace(key) != "" {
			methods[name] = key
		}
	}

	switch vm := v.(type) {
	case map[string]string:
		for name, key := range vm {
			keep(name, key)
		}
	case map[string]any:
		for name, raw := range vm {
			key, _ := raw.(string)
			keep(name, key)
		}
	}

	return methods
}
+
// extractServicesMap coerces the "services" value into a
// name → {type, endpoint} map, accepting both an already-typed map and a
// decoded-JSON map. Entries without a usable endpoint (checking "endpoint"
// first, then "serviceEndpoint") are dropped. NOTE: the typed branch stores a
// trimmed endpoint while the decoded branch stores it verbatim once known to
// be non-blank — preserved as-is from the original.
func extractServicesMap(v any) map[string]map[string]string {
	out := map[string]map[string]string{}

	switch services := v.(type) {
	case map[string]map[string]string:
		for name, fields := range services {
			endpoint := strings.TrimSpace(fields["endpoint"])
			if endpoint == "" {
				endpoint = strings.TrimSpace(fields["serviceEndpoint"])
			}

			if endpoint != "" {
				out[name] = map[string]string{
					"type":     fields["type"],
					"endpoint": endpoint,
				}
			}
		}
	case map[string]any:
		for name, raw := range services {
			fields, isMap := raw.(map[string]any)
			if !isMap {
				continue
			}

			endpoint, _ := fields["endpoint"].(string)
			if endpoint == "" {
				endpoint, _ = fields["serviceEndpoint"].(string)
			}

			if strings.TrimSpace(endpoint) == "" {
				continue
			}

			typ, _ := fields["type"].(string)
			out[name] = map[string]string{
				"type":     typ,
				"endpoint": endpoint,
			}
		}
	}

	return out
}
+
+func writeCompatibilityErr(w http.ResponseWriter, code int, message string) {
+ writeJSON(w, code, map[string]any{"message": message})
+}
+
func (s *Server) withTimeout(next http.Handler) http.Handler {
timeout := s.cfg.RequestTimeout
diff --git a/internal/ingest/service.go b/internal/ingest/service.go
index 1ea2a6e..6cb7512 100644
--- a/internal/ingest/service.go
+++ b/internal/ingest/service.go
@@ -920,24 +920,24 @@ func (s *Service) LoadCurrentPLCData(ctx context.Context, did string) (map[strin
return nil, err
}
- if s.cfg.Mode != config.ModeMirror || s.blockLog == nil {
- state, ok, err := s.store.GetState(did)
+ stateVal, stateOK, err := s.store.GetState(did)
- if err != nil {
- return nil, err
- }
+ if err != nil {
+ return nil, err
+ }
- if !ok {
- return nil, ErrDIDNotFound
- }
+ if !stateOK {
+ return nil, ErrDIDNotFound
+ }
+ if s.cfg.Mode != config.ModeMirror || s.blockLog == nil {
var doc map[string]any
- if err := json.Unmarshal(state.DIDDocument, &doc); err != nil {
+ if err := json.Unmarshal(stateVal.DIDDocument, &doc); err != nil {
return nil, err
}
- return doc, nil
+ return normalizePLCDataFromDIDDocument(did, doc, stateVal.RotationKeys), nil
}
last, err := s.LoadLatestDIDOperation(ctx, did)
@@ -952,12 +952,7 @@ func (s *Service) LoadCurrentPLCData(ctx context.Context, did string) (map[strin
return nil, fmt.Errorf("decode latest operation: %w", err)
}
- delete(op, "sig")
- delete(op, "signature")
- delete(op, "sigPayload")
- delete(op, "signaturePayload")
-
- return op, nil
+ return normalizePLCDataFromOperation(did, op, stateVal.RotationKeys), nil
}
func (s *Service) StreamExport(ctx context.Context, after time.Time, limit int, emit func(types.ExportRecord) error) error {
@@ -1046,6 +1041,236 @@ func detectNullified(operation []byte) bool {
return v
}
+func normalizePLCDataFromOperation(did string, op map[string]any, fallbackRotation []string) map[string]any {
+ data := map[string]any{
+ "did": did,
+ "verificationMethods": map[string]string{},
+ "rotationKeys": []string{},
+ "alsoKnownAs": []string{},
+ "services": map[string]map[string]string{},
+ }
+
+ if aka := extractStringSlice(op["alsoKnownAs"]); len(aka) > 0 {
+ data["alsoKnownAs"] = aka
+ } else if handle, _ := op["handle"].(string); strings.TrimSpace(handle) != "" {
+ data["alsoKnownAs"] = []string{"at://" + handle}
+ }
+
+ services := normalizeServices(op)
+ if len(services) > 0 {
+ data["services"] = services
+ }
+
+ verificationMethods := normalizeVerificationMethods(op)
+ if len(verificationMethods) > 0 {
+ data["verificationMethods"] = verificationMethods
+ }
+
+ rotationKeys := extractStringSlice(op["rotationKeys"])
+ if len(rotationKeys) == 0 {
+ if recovery, _ := op["recoveryKey"].(string); strings.TrimSpace(recovery) != "" {
+ rotationKeys = append(rotationKeys, recovery)
+ }
+ }
+
+ if len(rotationKeys) == 0 && len(fallbackRotation) > 0 {
+ rotationKeys = append(rotationKeys, fallbackRotation...)
+ }
+
+ data["rotationKeys"] = dedupeStrings(rotationKeys)
+
+ return data
+}
+
+func normalizePLCDataFromDIDDocument(did string, doc map[string]any, fallbackRotation []string) map[string]any {
+ data := map[string]any{
+ "did": did,
+ "verificationMethods": map[string]string{},
+ "rotationKeys": dedupeStrings(fallbackRotation),
+ "alsoKnownAs": []string{},
+ "services": map[string]map[string]string{},
+ }
+
+ if aka := extractStringSlice(doc["alsoKnownAs"]); len(aka) > 0 {
+ data["alsoKnownAs"] = aka
+ }
+
+ if rawVMList, ok := doc["verificationMethod"].([]any); ok {
+ verificationMethods := map[string]string{}
+
+ for _, rawVM := range rawVMList {
+ vm, ok := rawVM.(map[string]any)
+ if !ok {
+ continue
+ }
+
+ name := "atproto"
+ if id, _ := vm["id"].(string); id != "" {
+ if idx := strings.LastIndex(id, "#"); idx >= 0 && idx < len(id)-1 {
+ name = id[idx+1:]
+ }
+ }
+
+ if key, _ := vm["publicKeyMultibase"].(string); strings.TrimSpace(key) != "" {
+ verificationMethods[name] = key
+ }
+ }
+
+ if len(verificationMethods) > 0 {
+ data["verificationMethods"] = verificationMethods
+ }
+ }
+
+ if rawServiceList, ok := doc["service"].([]any); ok {
+ services := map[string]map[string]string{}
+
+ for _, rawService := range rawServiceList {
+ service, ok := rawService.(map[string]any)
+ if !ok {
+ continue
+ }
+
+ name := "atproto_pds"
+ if id, _ := service["id"].(string); id != "" {
+ name = strings.TrimPrefix(id, "#")
+ if strings.TrimSpace(name) == "" {
+ name = "atproto_pds"
+ }
+ }
+
+ typ, _ := service["type"].(string)
+ endpoint, _ := service["serviceEndpoint"].(string)
+ if endpoint == "" {
+ endpoint, _ = service["endpoint"].(string)
+ }
+
+ if strings.TrimSpace(endpoint) == "" {
+ continue
+ }
+
+ services[name] = map[string]string{
+ "type": typ,
+ "endpoint": endpoint,
+ }
+ }
+
+ if len(services) > 0 {
+ data["services"] = services
+ }
+ }
+
+ return data
+}
+
// normalizeServices extracts the service map from a PLC operation, handling
// both the modern "services" object and the legacy top-level "service"
// endpoint string. When a "services" object is present — even if it yields no
// valid entries — it takes precedence and the legacy field is ignored.
func normalizeServices(op map[string]any) map[string]map[string]string {
	out := map[string]map[string]string{}

	rawServices, hasModern := op["services"].(map[string]any)
	if !hasModern {
		// Legacy create ops carry a single PDS endpoint under "service".
		if endpoint, _ := op["service"].(string); strings.TrimSpace(endpoint) != "" {
			out["atproto_pds"] = map[string]string{
				"type":     "AtprotoPersonalDataServer",
				"endpoint": endpoint,
			}
		}

		return out
	}

	for name, raw := range rawServices {
		fields, isMap := raw.(map[string]any)
		if !isMap {
			continue
		}

		endpoint, _ := fields["endpoint"].(string)
		if endpoint == "" {
			endpoint, _ = fields["serviceEndpoint"].(string)
		}

		if strings.TrimSpace(endpoint) == "" {
			continue
		}

		typ, _ := fields["type"].(string)
		out[name] = map[string]string{
			"type":     typ,
			"endpoint": endpoint,
		}
	}

	return out
}
+
// normalizeVerificationMethods extracts signing keys from a PLC operation,
// preferring the modern "verificationMethods" map and falling back to the
// legacy top-level "signingKey" (as "atproto") when the map is absent or
// yields no usable entries.
func normalizeVerificationMethods(op map[string]any) map[string]string {
	out := map[string]string{}

	if vm, ok := op["verificationMethods"].(map[string]any); ok {
		for name, raw := range vm {
			if key, _ := raw.(string); strings.TrimSpace(key) != "" {
				out[name] = key
			}
		}

		if len(out) > 0 {
			return out
		}
	}

	if key, _ := op["signingKey"].(string); strings.TrimSpace(key) != "" {
		out["atproto"] = key
	}

	return out
}
+
// extractStringSlice converts a decoded JSON array into its non-blank string
// members. Non-array input yields nil. Kept values are stored verbatim
// (untrimmed); blanks and non-string elements are skipped.
func extractStringSlice(v any) []string {
	items, ok := v.([]any)
	if !ok {
		return nil
	}

	out := make([]string, 0, len(items))

	for _, raw := range items {
		if s, _ := raw.(string); strings.TrimSpace(s) != "" {
			out = append(out, s)
		}
	}

	return out
}
+
// dedupeStrings trims each input string, drops blanks, and removes duplicates
// while preserving first-occurrence order. It always returns a non-nil slice
// (empty rather than nil), which keeps JSON encoding as [] instead of null.
func dedupeStrings(input []string) []string {
	out := []string{}
	seen := map[string]struct{}{}

	for _, raw := range input {
		trimmed := strings.TrimSpace(raw)
		if trimmed == "" {
			continue
		}

		if _, dup := seen[trimmed]; dup {
			continue
		}

		seen[trimmed] = struct{}{}
		out = append(out, trimmed)
	}

	return out
}
+
func (s *Service) Flush(ctx context.Context) error {
_ = ctx