aboutsummaryrefslogtreecommitdiff
path: root/cmd/plccompat
diff options
context:
space:
mode:
authorFuwn <[email protected]>2026-02-26 20:20:09 -0800
committerFuwn <[email protected]>2026-02-26 20:20:09 -0800
commit80dc07bf7bbf3ee9f7191a0446199d74cbb2d341 (patch)
tree746f1518723b64a5270131a1e19b6c6f5a312926 /cmd/plccompat
parentchore: remove accidentally committed binary (diff)
downloadplutia-test-80dc07bf7bbf3ee9f7191a0446199d74cbb2d341.tar.xz
plutia-test-80dc07bf7bbf3ee9f7191a0446199d74cbb2d341.zip
fix: align PLC compatibility read endpoints with plc.directory schema
Diffstat (limited to 'cmd/plccompat')
-rw-r--r--cmd/plccompat/main.go879
1 file changed, 879 insertions, 0 deletions
diff --git a/cmd/plccompat/main.go b/cmd/plccompat/main.go
new file mode 100644
index 0000000..133a766
--- /dev/null
+++ b/cmd/plccompat/main.go
@@ -0,0 +1,879 @@
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "math/rand"
+ "net/http"
+ "net/url"
+ "os"
+ "sort"
+ "strings"
+ "time"
+)
+
// endpointResult records the outcome of comparing one endpoint between the
// local Plutia instance and the upstream PLC directory.
type endpointResult struct {
	Endpoint         string   `json:"endpoint"`              // request path that was compared
	Context          string   `json:"context,omitempty"`     // free-form label, e.g. "did=…", "export", "missing_did"
	Compatible       bool     `json:"compatible"`            // true when no issues were found
	StatusLocal      int      `json:"status_local"`          // HTTP status returned by the local server
	StatusUpstream   int      `json:"status_upstream"`       // HTTP status returned by upstream
	ContentTypeLocal string   `json:"content_type_local"`    // raw Content-Type header from the local server
	ContentTypeUp    string   `json:"content_type_upstream"` // raw Content-Type header from upstream
	Issues           []string `json:"issues,omitempty"`      // individual incompatibilities found
}
+
// report is the top-level JSON document this tool prints to stdout,
// summarizing the whole compatibility run.
type report struct {
	GeneratedAt       string           `json:"generated_at"`               // RFC 3339 UTC timestamp of the run
	LocalBase         string           `json:"local_base"`                 // local base URL with trailing slash trimmed
	UpstreamBase      string           `json:"upstream_base"`              // upstream base URL with trailing slash trimmed
	SelectedDIDs      []string         `json:"selected_dids"`              // DIDs sampled from the local export
	TombstoneDID      string           `json:"tombstone_did,omitempty"`    // first tombstoned DID found, if any
	LegacyCreateDID   string           `json:"legacy_create_did,omitempty"` // first DID with a legacy "create" op, if any
	Results           []endpointResult `json:"results"`                    // per-endpoint comparison outcomes
	AllCompatible     bool             `json:"all_compatible"`             // false if any result was incompatible
	CompatibilityNote string           `json:"compatibility_note,omitempty"` // human-readable hint set on failure
}
+
// fetchResp is the minimal slice of an HTTP response needed for comparison.
type fetchResp struct {
	status      int    // HTTP status code
	contentType string // raw Content-Type header value
	body        []byte // response body (doFetch caps the read at 10 MiB)
}
+
// main drives a read-endpoint compatibility check of a local Plutia instance
// against the upstream plc.directory: it samples DIDs from the local export
// stream, fetches each read endpoint from both servers, compares status,
// content type, and JSON body shape, and prints a JSON report to stdout.
//
// Exit codes: 0 all endpoints compatible, 1 incompatibility or discovery
// failure, 2 invalid flags.
func main() {
	var (
		localBase    = flag.String("local", "http://127.0.0.1:8080", "local Plutia base URL")
		upstreamBase = flag.String("upstream", "https://plc.directory", "upstream PLC directory base URL")
		count        = flag.Int("count", 10, "export count for matrix export checks")
		sampleN      = flag.Int("sample", 5, "random DID samples from local export")
		seed         = flag.Int64("seed", time.Now().UnixNano(), "random seed")
	)
	flag.Parse()

	if *count < 1 {
		fmt.Fprintln(os.Stderr, "count must be >= 1")
		os.Exit(2)
	}
	if *sampleN < 1 {
		fmt.Fprintln(os.Stderr, "sample must be >= 1")
		os.Exit(2)
	}

	client := &http.Client{Timeout: 30 * time.Second}

	// Scan up to 2500 rows of the local export to pick the sample DIDs plus,
	// when present, a tombstoned DID and a legacy "create" DID.
	dids, tombstone, legacy, err := discoverDIDs(client, strings.TrimRight(*localBase, "/"), 2500, *sampleN, *seed)
	if err != nil {
		fmt.Fprintf(os.Stderr, "discover dids: %v\n", err)
		os.Exit(1)
	}

	rep := report{
		GeneratedAt:     time.Now().UTC().Format(time.RFC3339),
		LocalBase:       strings.TrimRight(*localBase, "/"),
		UpstreamBase:    strings.TrimRight(*upstreamBase, "/"),
		SelectedDIDs:    dids,
		TombstoneDID:    tombstone,
		LegacyCreateDID: legacy,
		AllCompatible:   true,
	}

	// For each sampled DID compare all read endpoints: the DID document (""),
	// operation log, latest operation, audit log, and PLC data.
	for _, did := range dids {
		for _, suffix := range []string{"", "/log", "/log/last", "/log/audit", "/data"} {
			path := "/" + did + suffix
			res := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, path, "did="+did)
			rep.Results = append(rep.Results, res)
			if !res.Compatible {
				rep.AllCompatible = false
			}
		}
	}

	// Export schema + shape comparison.
	exportPath := fmt.Sprintf("/export?count=%d", *count)
	exportRes := compareNDJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, exportPath)
	rep.Results = append(rep.Results, exportRes)
	if !exportRes.Compatible {
		rep.AllCompatible = false
	}

	// 404 shape comparison.
	missingDID := "did:plc:this-does-not-exist-for-compat-check"
	notFoundRes := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, "/"+missingDID, "missing_did")
	rep.Results = append(rep.Results, notFoundRes)
	if !notFoundRes.Compatible {
		rep.AllCompatible = false
	}

	// 410 shape comparison where available.
	if tombstone != "" {
		goneRes := compareJSONEndpoint(client, rep.LocalBase, rep.UpstreamBase, "/"+tombstone, "tombstone_did")
		rep.Results = append(rep.Results, goneRes)
		if !goneRes.Compatible {
			rep.AllCompatible = false
		}
	}

	if !rep.AllCompatible {
		rep.CompatibilityNote = "incompatibilities detected; inspect issues per endpoint"
	}

	// Best-effort write: an encode failure here has nowhere useful to go.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	_ = enc.Encode(rep)

	if !rep.AllCompatible {
		os.Exit(1)
	}
}
+
// discoverDIDs reads up to count rows from the local /export stream and
// derives the check inputs: a seeded random sample of sampleN distinct DIDs,
// plus — when seen in the scanned window — the first tombstoned DID and the
// first DID recorded via a legacy "create" operation (both appended to the
// sample if not already selected). Returns the selected DIDs, the tombstone
// DID ("" if none), the legacy-create DID ("" if none), and any fetch or
// scan error.
func discoverDIDs(client *http.Client, base string, count int, sampleN int, seed int64) ([]string, string, string, error) {
	path := fmt.Sprintf("%s/export?count=%d", base, count)
	res, err := doFetch(client, path)
	if err != nil {
		return nil, "", "", err
	}
	if res.status != http.StatusOK {
		return nil, "", "", fmt.Errorf("local export status=%d", res.status)
	}

	// Only the DID and the operation type are needed from each export row.
	type rec struct {
		DID       string                 `json:"did"`
		Operation map[string]interface{} `json:"operation"`
	}

	r := bufio.NewScanner(bytes.NewReader(res.body))
	// Raise the per-line limit to 10 MiB; rows may exceed the 64 KiB default.
	r.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)
	unique := map[string]struct{}{}
	ordered := make([]string, 0, count)
	tombstone := ""
	legacy := ""

	for r.Scan() {
		line := bytes.TrimSpace(r.Bytes())
		if len(line) == 0 {
			continue
		}
		var row rec
		// Malformed rows are skipped rather than failing the whole run.
		if err := json.Unmarshal(line, &row); err != nil {
			continue
		}
		if row.DID == "" {
			continue
		}
		// Deduplicate while preserving first-seen order.
		if _, ok := unique[row.DID]; !ok {
			unique[row.DID] = struct{}{}
			ordered = append(ordered, row.DID)
		}
		// Remember the first tombstoned and first legacy-create DID seen.
		typ, _ := row.Operation["type"].(string)
		switch typ {
		case "plc_tombstone", "tombstone":
			if tombstone == "" {
				tombstone = row.DID
			}
		case "create":
			if legacy == "" {
				legacy = row.DID
			}
		}
	}
	if err := r.Err(); err != nil {
		return nil, "", "", err
	}

	if len(ordered) == 0 {
		return nil, "", "", errors.New("no dids discovered from local export")
	}

	if sampleN > len(ordered) {
		sampleN = len(ordered)
	}

	// A seeded permutation keeps the sample reproducible for a given -seed.
	rng := rand.New(rand.NewSource(seed))
	perm := rng.Perm(len(ordered))
	out := make([]string, 0, sampleN+2)
	for i := 0; i < sampleN; i++ {
		out = append(out, ordered[perm[i]])
	}
	if tombstone != "" && !contains(out, tombstone) {
		out = append(out, tombstone)
	}
	if legacy != "" && !contains(out, legacy) {
		out = append(out, legacy)
	}

	return out, tombstone, legacy, nil
}
+
// compareJSONEndpoint fetches path from both the local and upstream base URLs
// and compares status code, normalized content type, and — when both sides
// returned a JSON-ish content type — the endpoint-specific body shape. For
// document-like kinds ("did", "data", "error") it additionally runs a strict
// structural diff of the two bodies. ctx is a free-form label copied into the
// result for reporting.
func compareJSONEndpoint(client *http.Client, localBase, upstreamBase, path, ctx string) endpointResult {
	res := endpointResult{Endpoint: path, Context: ctx, Compatible: true}

	localURL := localBase + path
	upURL := upstreamBase + path

	l, lErr := doFetch(client, localURL)
	u, uErr := doFetch(client, upURL)
	if lErr != nil || uErr != nil {
		// A transport failure on either side makes comparison impossible.
		res.Compatible = false
		if lErr != nil {
			res.Issues = append(res.Issues, "local fetch error: "+lErr.Error())
		}
		if uErr != nil {
			res.Issues = append(res.Issues, "upstream fetch error: "+uErr.Error())
		}
		return res
	}

	res.StatusLocal = l.status
	res.StatusUpstream = u.status
	res.ContentTypeLocal = l.contentType
	res.ContentTypeUp = u.contentType

	if l.status != u.status {
		res.Compatible = false
		res.Issues = append(res.Issues, fmt.Sprintf("status mismatch local=%d upstream=%d", l.status, u.status))
	}
	// Content types are compared after stripping parameters and lowercasing.
	if normalizeContentType(l.contentType) != normalizeContentType(u.contentType) {
		res.Compatible = false
		res.Issues = append(res.Issues, fmt.Sprintf("content-type mismatch local=%q upstream=%q", normalizeContentType(l.contentType), normalizeContentType(u.contentType)))
	}

	if isJSONish(normalizeContentType(l.contentType)) && isJSONish(normalizeContentType(u.contentType)) {
		kind := endpointKind(path)
		// Validate each side against the expected schema for this endpoint.
		issues := append(
			validateEndpointJSONShape(kind, l.status, l.body, "local"),
			validateEndpointJSONShape(kind, u.status, u.body, "upstream")...,
		)

		switch kind {
		case "did", "data", "error":
			// Structural field-by-field diff for single-document endpoints.
			lm, um, strictIssues := compareJSONBodies(l.body, u.body)
			if lm || um || len(strictIssues) > 0 {
				issues = append(issues, strictIssues...)
			}
		}

		if len(issues) > 0 {
			res.Compatible = false
			res.Issues = append(res.Issues, issues...)
		}
	}

	return res
}
+
// compareNDJSONEndpoint fetches the NDJSON export stream at path from both
// servers, compares status and content type, parses each side line-by-line,
// and validates every row against the export record schema (did, operation,
// cid, nullified, createdAt). The two streams are validated independently,
// not diffed against each other.
func compareNDJSONEndpoint(client *http.Client, localBase, upstreamBase, path string) endpointResult {
	res := endpointResult{Endpoint: path, Context: "export", Compatible: true}
	localURL := localBase + path
	upURL := upstreamBase + path

	l, lErr := doFetch(client, localURL)
	u, uErr := doFetch(client, upURL)
	if lErr != nil || uErr != nil {
		// A transport failure on either side makes comparison impossible.
		res.Compatible = false
		if lErr != nil {
			res.Issues = append(res.Issues, "local fetch error: "+lErr.Error())
		}
		if uErr != nil {
			res.Issues = append(res.Issues, "upstream fetch error: "+uErr.Error())
		}
		return res
	}

	res.StatusLocal = l.status
	res.StatusUpstream = u.status
	res.ContentTypeLocal = l.contentType
	res.ContentTypeUp = u.contentType

	if l.status != u.status {
		res.Compatible = false
		res.Issues = append(res.Issues, fmt.Sprintf("status mismatch local=%d upstream=%d", l.status, u.status))
	}
	if normalizeContentType(l.contentType) != normalizeContentType(u.contentType) {
		res.Compatible = false
		res.Issues = append(res.Issues, fmt.Sprintf("content-type mismatch local=%q upstream=%q", normalizeContentType(l.contentType), normalizeContentType(u.contentType)))
	}

	localRows, lErr := parseNDJSON(l.body)
	upRows, uErr := parseNDJSON(u.body)
	if lErr != nil || uErr != nil {
		res.Compatible = false
		if lErr != nil {
			res.Issues = append(res.Issues, "local ndjson parse error: "+lErr.Error())
		}
		if uErr != nil {
			res.Issues = append(res.Issues, "upstream ndjson parse error: "+uErr.Error())
		}
		return res
	}

	// Validate every row on each side; line numbers are 1-based in messages.
	for i, row := range localRows {
		issues := validateExportShape(row)
		for _, issue := range issues {
			res.Issues = append(res.Issues, fmt.Sprintf("local line %d: %s", i+1, issue))
		}
	}
	for i, row := range upRows {
		issues := validateExportShape(row)
		for _, issue := range issues {
			res.Issues = append(res.Issues, fmt.Sprintf("upstream line %d: %s", i+1, issue))
		}
	}

	// Any accumulated issue (status, content type, or row shape) fails the check.
	if len(res.Issues) > 0 {
		res.Compatible = false
	}

	return res
}
+
// endpointKind classifies a request path into one of the PLC read-endpoint
// families so the matching JSON-shape validator can be applied. Paths that
// fit no known family are treated as error-body endpoints.
func endpointKind(path string) string {
	if strings.HasSuffix(path, "/log/audit") {
		return "log_audit"
	}
	if strings.HasSuffix(path, "/log/last") {
		return "log_last"
	}
	if strings.HasSuffix(path, "/log") {
		return "log"
	}
	if strings.HasSuffix(path, "/data") {
		return "data"
	}
	if strings.HasPrefix(path, "/did:") {
		return "did"
	}
	return "error"
}
+
// validateEndpointJSONShape checks one side's JSON body against the expected
// plc.directory schema for the given endpoint kind and HTTP status, returning
// issue strings prefixed with side ("local"/"upstream"). Status codes with no
// schema defined here yield no issues. For a 410 on the DID document endpoint
// either a DID-document-shaped body or a {"message": ...} error body is
// accepted — NOTE(review): this dual acceptance mirrors observed upstream
// variation; confirm against plc.directory behavior.
func validateEndpointJSONShape(kind string, status int, body []byte, side string) []string {
	decoded, err := decodeJSON(body)
	if err != nil {
		return []string{fmt.Sprintf("%s json decode error: %v", side, err)}
	}

	switch kind {
	case "did":
		switch status {
		case http.StatusOK:
			return prefixIssues(side, validateDIDDocumentShape(decoded))
		case http.StatusGone:
			// Tombstoned DID: accept a valid DID document...
			issues := validateDIDDocumentShape(decoded)
			if len(issues) == 0 {
				return nil
			}

			// ...or a valid message-style error body.
			if msgIssues := validateMessageErrorShape(decoded); len(msgIssues) == 0 {
				return nil
			}

			// Neither shape matched; report the DID-document issues.
			return prefixIssues(side, issues)
		case http.StatusNotFound:
			return prefixIssues(side, validateMessageErrorShape(decoded))
		default:
			return nil
		}
	case "data":
		if status == http.StatusOK {
			return prefixIssues(side, validatePLCDataShape(decoded))
		}

		if status == http.StatusNotFound {
			return prefixIssues(side, validateMessageErrorShape(decoded))
		}

		return nil
	case "log":
		if status == http.StatusOK {
			return prefixIssues(side, validateOperationArray(decoded))
		}

		if status == http.StatusNotFound {
			return prefixIssues(side, validateMessageErrorShape(decoded))
		}

		return nil
	case "log_last":
		if status == http.StatusOK {
			return prefixIssues(side, validateOperationShape(decoded))
		}

		if status == http.StatusNotFound {
			return prefixIssues(side, validateMessageErrorShape(decoded))
		}

		return nil
	case "log_audit":
		if status == http.StatusOK {
			return prefixIssues(side, validateAuditArrayShape(decoded))
		}

		if status == http.StatusNotFound {
			return prefixIssues(side, validateMessageErrorShape(decoded))
		}

		return nil
	default:
		// Unknown kinds are only checked for a sane error-body shape.
		if status == http.StatusNotFound || status == http.StatusMethodNotAllowed {
			return prefixIssues(side, validateMessageErrorShape(decoded))
		}

		return nil
	}
}
+
// decodeJSON parses body into a generic JSON value. Numbers are kept as
// json.Number so large integers survive without float64 precision loss.
func decodeJSON(body []byte) (interface{}, error) {
	decoder := json.NewDecoder(bytes.NewReader(body))
	decoder.UseNumber()

	var value interface{}
	if err := decoder.Decode(&value); err != nil {
		return nil, err
	}

	return value, nil
}
+
// prefixIssues returns a copy of issues with side ("local"/"upstream")
// prepended to each message; an empty input yields nil.
func prefixIssues(side string, issues []string) []string {
	if len(issues) == 0 {
		return nil
	}

	prefixed := make([]string, len(issues))
	for i, issue := range issues {
		prefixed[i] = side + " " + issue
	}

	return prefixed
}
+
// validateMessageErrorShape checks that an error body is an object with a
// non-blank "message" string and no "error" field, matching the
// plc.directory error response shape. Returns nil when the shape is valid.
func validateMessageErrorShape(v interface{}) []string {
	body, isObject := v.(map[string]interface{})
	if !isObject {
		return []string{"expected object error body"}
	}

	message, isString := body["message"].(string)
	if !isString || strings.TrimSpace(message) == "" {
		return []string{"missing message string field"}
	}

	if _, present := body["error"]; present {
		return []string{"unexpected error field"}
	}

	return nil
}
+
+func validateDIDDocumentShape(v interface{}) []string {
+ obj, ok := v.(map[string]interface{})
+ if !ok {
+ return []string{"expected object did document"}
+ }
+
+ issues := make([]string, 0)
+ if _, ok := obj["@context"].([]interface{}); !ok {
+ issues = append(issues, "missing @context array")
+ }
+
+ if _, ok := obj["id"].(string); !ok {
+ issues = append(issues, "missing id string")
+ }
+
+ allowed := map[string]struct{}{
+ "@context": {},
+ "id": {},
+ "alsoKnownAs": {},
+ "verificationMethod": {},
+ "service": {},
+ "deactivated": {},
+ }
+
+ if val, ok := obj["alsoKnownAs"]; ok && typeName(val) != "array" {
+ issues = append(issues, "alsoKnownAs must be array")
+ }
+
+ if val, ok := obj["verificationMethod"]; ok && typeName(val) != "array" {
+ issues = append(issues, "verificationMethod must be array")
+ }
+
+ if val, ok := obj["service"]; ok && typeName(val) != "array" {
+ issues = append(issues, "service must be array")
+ }
+
+ if val, ok := obj["deactivated"]; ok && typeName(val) != "bool" {
+ issues = append(issues, "deactivated must be bool")
+ }
+
+ for key := range obj {
+ if _, ok := allowed[key]; !ok {
+ issues = append(issues, "extra field "+key)
+ }
+ }
+
+ sort.Strings(issues)
+
+ return issues
+}
+
+func validatePLCDataShape(v interface{}) []string {
+ obj, ok := v.(map[string]interface{})
+ if !ok {
+ return []string{"expected object plc data"}
+ }
+
+ required := map[string]string{
+ "did": "string",
+ "verificationMethods": "object",
+ "rotationKeys": "array",
+ "alsoKnownAs": "array",
+ "services": "object",
+ }
+
+ issues := make([]string, 0)
+ for key, wantType := range required {
+ got, ok := obj[key]
+ if !ok {
+ issues = append(issues, "missing field "+key)
+
+ continue
+ }
+
+ if gotType := typeName(got); gotType != wantType {
+ issues = append(issues, fmt.Sprintf("field %s type=%s want=%s", key, gotType, wantType))
+ }
+ }
+
+ for key := range obj {
+ if _, ok := required[key]; !ok {
+ issues = append(issues, "extra field "+key)
+ }
+ }
+
+ sort.Strings(issues)
+
+ return issues
+}
+
+func validateOperationArray(v interface{}) []string {
+ rows, ok := v.([]interface{})
+ if !ok {
+ return []string{"expected array of operations"}
+ }
+
+ issues := make([]string, 0)
+ for idx, row := range rows {
+ rowIssues := validateOperationShape(row)
+ for _, issue := range rowIssues {
+ issues = append(issues, fmt.Sprintf("row %d: %s", idx, issue))
+ }
+ }
+
+ return issues
+}
+
+func validateAuditArrayShape(v interface{}) []string {
+ rows, ok := v.([]interface{})
+ if !ok {
+ return []string{"expected array of audit entries"}
+ }
+
+ issues := make([]string, 0)
+ for idx, row := range rows {
+ obj, ok := row.(map[string]interface{})
+ if !ok {
+ issues = append(issues, fmt.Sprintf("row %d: expected object", idx))
+
+ continue
+ }
+
+ required := map[string]string{
+ "did": "string",
+ "operation": "object",
+ "cid": "string",
+ "nullified": "bool",
+ "createdAt": "string",
+ }
+
+ for key, want := range required {
+ val, ok := obj[key]
+ if !ok {
+ issues = append(issues, fmt.Sprintf("row %d: missing field %s", idx, key))
+
+ continue
+ }
+
+ if got := typeName(val); got != want {
+ issues = append(issues, fmt.Sprintf("row %d: field %s type=%s want=%s", idx, key, got, want))
+ }
+ }
+
+ if op, ok := obj["operation"]; ok {
+ for _, issue := range validateOperationShape(op) {
+ issues = append(issues, fmt.Sprintf("row %d operation: %s", idx, issue))
+ }
+ }
+ }
+
+ return issues
+}
+
+func validateOperationShape(v interface{}) []string {
+ obj, ok := v.(map[string]interface{})
+ if !ok {
+ return []string{"expected operation object"}
+ }
+
+ typ, _ := obj["type"].(string)
+ if strings.TrimSpace(typ) == "" {
+ return []string{"missing type string"}
+ }
+
+ issues := make([]string, 0)
+
+ if got := typeName(obj["sig"]); got != "string" {
+ issues = append(issues, "missing sig string")
+ }
+
+ if _, ok := obj["prev"]; !ok {
+ issues = append(issues, "missing prev field")
+ }
+
+ switch typ {
+ case "create":
+ for _, field := range []string{"handle", "service", "signingKey", "recoveryKey"} {
+ if got := typeName(obj[field]); got != "string" {
+ issues = append(issues, "missing "+field+" string")
+ }
+ }
+ case "plc_operation":
+ if got := typeName(obj["services"]); got != "object" {
+ issues = append(issues, "missing services object")
+ }
+
+ if got := typeName(obj["alsoKnownAs"]); got != "array" {
+ issues = append(issues, "missing alsoKnownAs array")
+ }
+
+ if got := typeName(obj["rotationKeys"]); got != "array" {
+ issues = append(issues, "missing rotationKeys array")
+ }
+
+ if got := typeName(obj["verificationMethods"]); got != "object" {
+ issues = append(issues, "missing verificationMethods object")
+ }
+ case "plc_tombstone", "tombstone":
+ // No additional requirements.
+ }
+
+ sort.Strings(issues)
+
+ return issues
+}
+
// doFetch performs a GET against rawURL and returns the status code, raw
// Content-Type header, and up to 10 MiB of the response body.
func doFetch(client *http.Client, rawURL string) (fetchResp, error) {
	// NOTE(review): this parse/re-serialize round-trip is redundant —
	// http.NewRequest parses the URL itself and returns an equivalent error.
	// Kept because it is harmless and removing it would orphan the net/url
	// import.
	u, err := url.Parse(rawURL)
	if err != nil {
		return fetchResp{}, err
	}
	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		return fetchResp{}, err
	}
	resp, err := client.Do(req)
	if err != nil {
		return fetchResp{}, err
	}
	defer resp.Body.Close()

	// Cap the read at 10 MiB to bound memory on unexpectedly large responses.
	b, err := io.ReadAll(io.LimitReader(resp.Body, 10*1024*1024))
	if err != nil {
		return fetchResp{}, err
	}

	return fetchResp{
		status:      resp.StatusCode,
		contentType: resp.Header.Get("Content-Type"),
		body:        b,
	}, nil
}
+
// parseNDJSON decodes a newline-delimited JSON body into one generic value
// per non-blank line (numbers kept as json.Number). The first malformed line
// aborts with its decode error.
func parseNDJSON(body []byte) ([]interface{}, error) {
	rows := make([]interface{}, 0)
	scanner := bufio.NewScanner(bytes.NewReader(body))
	// Raise the per-line limit to 10 MiB; rows may exceed the 64 KiB default.
	scanner.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)

	for scanner.Scan() {
		raw := bytes.TrimSpace(scanner.Bytes())
		if len(raw) == 0 {
			continue
		}

		decoder := json.NewDecoder(bytes.NewReader(raw))
		decoder.UseNumber()

		var row interface{}
		if err := decoder.Decode(&row); err != nil {
			return nil, err
		}

		rows = append(rows, row)
	}

	if err := scanner.Err(); err != nil {
		return nil, err
	}

	return rows, nil
}
+
+func validateExportShape(v interface{}) []string {
+ obj, ok := v.(map[string]interface{})
+ if !ok {
+ return []string{"expected object line"}
+ }
+
+ required := map[string]string{
+ "did": "string",
+ "operation": "object",
+ "cid": "string",
+ "nullified": "bool",
+ "createdAt": "string",
+ }
+
+ issues := make([]string, 0)
+ for k, want := range required {
+ vv, ok := obj[k]
+ if !ok {
+ issues = append(issues, "missing field "+k)
+ continue
+ }
+ if typeName(vv) != want {
+ issues = append(issues, fmt.Sprintf("field %s type=%s want=%s", k, typeName(vv), want))
+ }
+ }
+
+ for k := range obj {
+ if _, ok := required[k]; !ok {
+ issues = append(issues, "extra field "+k)
+ }
+ }
+
+ sort.Strings(issues)
+ return issues
+}
+
+func compareJSONBodies(localBody, upBody []byte) (bool, bool, []string) {
+ var l, u interface{}
+ ld := json.NewDecoder(bytes.NewReader(localBody))
+ ld.UseNumber()
+ if err := ld.Decode(&l); err != nil {
+ return false, false, []string{"local json decode error: " + err.Error()}
+ }
+ ud := json.NewDecoder(bytes.NewReader(upBody))
+ ud.UseNumber()
+ if err := ud.Decode(&u); err != nil {
+ return false, false, []string{"upstream json decode error: " + err.Error()}
+ }
+
+ lm, um, issues := diffAny(l, u, "$")
+ return lm, um, issues
+}
+
+func diffAny(local, upstream interface{}, path string) (hasLocalMismatch bool, hasUpMismatch bool, issues []string) {
+ lt, ut := typeName(local), typeName(upstream)
+ if lt != ut {
+ return true, true, []string{fmt.Sprintf("%s type mismatch local=%s upstream=%s", path, lt, ut)}
+ }
+
+ switch l := local.(type) {
+ case map[string]interface{}:
+ u := upstream.(map[string]interface{})
+ for k := range u {
+ if _, ok := l[k]; !ok {
+ hasLocalMismatch = true
+ issues = append(issues, fmt.Sprintf("%s missing field in local: %s", path, k))
+ }
+ }
+ for k := range l {
+ if _, ok := u[k]; !ok {
+ hasUpMismatch = true
+ issues = append(issues, fmt.Sprintf("%s extra field in local: %s", path, k))
+ }
+ }
+ for k := range l {
+ uv, ok := u[k]
+ if !ok {
+ continue
+ }
+ lm, um, sub := diffAny(l[k], uv, path+"."+k)
+ if lm {
+ hasLocalMismatch = true
+ }
+ if um {
+ hasUpMismatch = true
+ }
+ issues = append(issues, sub...)
+ }
+ case []interface{}:
+ u := upstream.([]interface{})
+ n := min(len(l), len(u))
+ for i := 0; i < n; i++ {
+ lm, um, sub := diffAny(l[i], u[i], fmt.Sprintf("%s[%d]", path, i))
+ if lm {
+ hasLocalMismatch = true
+ }
+ if um {
+ hasUpMismatch = true
+ }
+ issues = append(issues, sub...)
+ }
+ }
+
+ return hasLocalMismatch, hasUpMismatch, issues
+}
+
+func typeName(v interface{}) string {
+ switch v.(type) {
+ case nil:
+ return "null"
+ case bool:
+ return "bool"
+ case string:
+ return "string"
+ case json.Number, float64, float32, int, int64, int32, uint, uint64:
+ return "number"
+ case []interface{}:
+ return "array"
+ case map[string]interface{}:
+ return "object"
+ default:
+ return fmt.Sprintf("%T", v)
+ }
+}
+
// normalizeContentType lowercases a Content-Type header value and strips any
// parameters (e.g. "; charset=utf-8") so media types compare cleanly; an
// empty or all-whitespace header yields "". Uses strings.Cut for the
// split-once instead of allocating a full strings.Split slice.
func normalizeContentType(v string) string {
	v = strings.TrimSpace(v)
	if v == "" {
		return ""
	}
	mediaType, _, _ := strings.Cut(v, ";")
	return strings.ToLower(strings.TrimSpace(mediaType))
}
+
// isJSONish reports whether a normalized media type is one of the JSON
// content types the PLC endpoints serve.
func isJSONish(ct string) bool {
	switch ct {
	case "application/json", "application/did+ld+json":
		return true
	}
	return false
}
+
// contains reports whether target occurs in xs.
func contains(xs []string, target string) bool {
	for i := range xs {
		if xs[i] == target {
			return true
		}
	}
	return false
}
+
// min returns the smaller of two ints.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}