Add response endpoint

Change-Id: I37fb32253d3011a8960a17852ea611443b9f093e
diff --git a/mapper/mapper.go b/mapper/mapper.go
index 0bbe39f..fe46a83 100644
--- a/mapper/mapper.go
+++ b/mapper/mapper.go
@@ -1,12 +1,9 @@
 package mapper
 
 import (
-	"encoding/json"
 	"fmt"
 
-	"github.com/KorAP/KoralPipe-TermMapper/ast"
 	"github.com/KorAP/KoralPipe-TermMapper/config"
-	"github.com/KorAP/KoralPipe-TermMapper/matcher"
 	"github.com/KorAP/KoralPipe-TermMapper/parser"
 )
 
@@ -81,407 +78,3 @@
 	Direction   Direction
 	AddRewrites bool
 }
-
-// ApplyQueryMappings applies the specified mapping rules to a JSON object
-func (m *Mapper) ApplyQueryMappings(mappingID string, opts MappingOptions, jsonData any) (any, error) {
-	// Validate mapping ID
-	if _, exists := m.mappingLists[mappingID]; !exists {
-		return nil, fmt.Errorf("mapping list with ID %s not found", mappingID)
-	}
-
-	// Get the parsed rules
-	rules := m.parsedRules[mappingID]
-
-	// Check if we have a wrapper object with a "query" field
-	var queryData any
-	var hasQueryWrapper bool
-
-	if jsonMap, ok := jsonData.(map[string]any); ok {
-		if query, exists := jsonMap["query"]; exists {
-			queryData = query
-			hasQueryWrapper = true
-		}
-	}
-
-	// If no query wrapper was found, use the entire input
-	if !hasQueryWrapper {
-		// If the input itself is not a valid query object, return it as is
-		if !isValidQueryObject(jsonData) {
-			return jsonData, nil
-		}
-		queryData = jsonData
-	} else if queryData == nil || !isValidQueryObject(queryData) {
-		// If we have a query wrapper but the query is nil or not a valid object,
-		// return the original data
-		return jsonData, nil
-	}
-
-	// Store rewrites if they exist
-	var oldRewrites any
-	if queryMap, ok := queryData.(map[string]any); ok {
-		if rewrites, exists := queryMap["rewrites"]; exists {
-			oldRewrites = rewrites
-			delete(queryMap, "rewrites")
-		}
-	}
-
-	// Convert input JSON to AST
-	jsonBytes, err := json.Marshal(queryData)
-	if err != nil {
-		return nil, fmt.Errorf("failed to marshal input JSON: %w", err)
-	}
-
-	node, err := parser.ParseJSON(jsonBytes)
-	if err != nil {
-		return nil, fmt.Errorf("failed to parse JSON into AST: %w", err)
-	}
-
-	// Store whether the input was a Token
-	isToken := false
-	var tokenWrap ast.Node
-	if token, ok := node.(*ast.Token); ok {
-		isToken = true
-		tokenWrap = token.Wrap
-		node = tokenWrap
-	}
-
-	// Store original node for rewrite if needed
-	var originalNode ast.Node
-	if opts.AddRewrites {
-		originalNode = node.Clone()
-	}
-
-	// Pre-check foundry/layer overrides to optimize processing
-	var patternFoundry, patternLayer, replacementFoundry, replacementLayer string
-	if opts.Direction { // true means AtoB
-		patternFoundry, patternLayer = opts.FoundryA, opts.LayerA
-		replacementFoundry, replacementLayer = opts.FoundryB, opts.LayerB
-	} else {
-		patternFoundry, patternLayer = opts.FoundryB, opts.LayerB
-		replacementFoundry, replacementLayer = opts.FoundryA, opts.LayerA
-	}
-
-	// Create a pattern cache key for memoization
-	type patternCacheKey struct {
-		ruleIndex     int
-		foundry       string
-		layer         string
-		isReplacement bool
-	}
-	patternCache := make(map[patternCacheKey]ast.Node)
-
-	// Apply each rule to the AST
-	for i, rule := range rules {
-		// Create pattern and replacement based on direction
-		var pattern, replacement ast.Node
-		if opts.Direction { // true means AtoB
-			pattern = rule.Upper
-			replacement = rule.Lower
-		} else {
-			pattern = rule.Lower
-			replacement = rule.Upper
-		}
-
-		// Extract the inner nodes from the pattern and replacement tokens
-		if token, ok := pattern.(*ast.Token); ok {
-			pattern = token.Wrap
-		}
-		if token, ok := replacement.(*ast.Token); ok {
-			replacement = token.Wrap
-		}
-
-		// First, quickly check if the pattern could match without creating a full matcher
-		// This is a lightweight pre-check to avoid expensive operations
-		if !m.couldPatternMatch(node, pattern) {
-			continue
-		}
-
-		// Get or create pattern with overrides
-		patternKey := patternCacheKey{ruleIndex: i, foundry: patternFoundry, layer: patternLayer, isReplacement: false}
-		processedPattern, exists := patternCache[patternKey]
-		if !exists {
-			// Clone pattern only when needed
-			processedPattern = pattern.Clone()
-			// Apply foundry and layer overrides only if they're non-empty
-			if patternFoundry != "" || patternLayer != "" {
-				ast.ApplyFoundryAndLayerOverrides(processedPattern, patternFoundry, patternLayer)
-			}
-			patternCache[patternKey] = processedPattern
-		}
-
-		// Create a temporary matcher to check for actual matches
-		tempMatcher, err := matcher.NewMatcher(ast.Pattern{Root: processedPattern}, ast.Replacement{Root: &ast.Term{}})
-		if err != nil {
-			return nil, fmt.Errorf("failed to create temporary matcher: %w", err)
-		}
-
-		// Only proceed if there's an actual match
-		if !tempMatcher.Match(node) {
-			continue
-		}
-
-		// Get or create replacement with overrides (lazy evaluation)
-		replacementKey := patternCacheKey{ruleIndex: i, foundry: replacementFoundry, layer: replacementLayer, isReplacement: true}
-		processedReplacement, exists := patternCache[replacementKey]
-		if !exists {
-			// Clone replacement only when we have a match
-			processedReplacement = replacement.Clone()
-			// Apply foundry and layer overrides only if they're non-empty
-			if replacementFoundry != "" || replacementLayer != "" {
-				ast.ApplyFoundryAndLayerOverrides(processedReplacement, replacementFoundry, replacementLayer)
-			}
-			patternCache[replacementKey] = processedReplacement
-		}
-
-		// Create the actual matcher and apply replacement
-		actualMatcher, err := matcher.NewMatcher(ast.Pattern{Root: processedPattern}, ast.Replacement{Root: processedReplacement})
-		if err != nil {
-			return nil, fmt.Errorf("failed to create matcher: %w", err)
-		}
-		node = actualMatcher.Replace(node)
-	}
-
-	// Wrap the result in a token if the input was a token
-	var result ast.Node
-	if isToken {
-		result = &ast.Token{Wrap: node}
-	} else {
-		result = node
-	}
-
-	// Convert AST back to JSON
-	resultBytes, err := parser.SerializeToJSON(result)
-	if err != nil {
-		return nil, fmt.Errorf("failed to serialize AST to JSON: %w", err)
-	}
-
-	// Parse the JSON string back into
-	var resultData any
-	if err := json.Unmarshal(resultBytes, &resultData); err != nil {
-		return nil, fmt.Errorf("failed to parse result JSON: %w", err)
-	}
-
-	// Add rewrites if enabled and node was changed
-	if opts.AddRewrites && !ast.NodesEqual(node, originalNode) {
-		// Create rewrite object
-		rewrite := map[string]any{
-			"@type":  "koral:rewrite",
-			"editor": "termMapper",
-		}
-
-		// Check if the node types are different (structural change)
-		if originalNode.Type() != node.Type() {
-			// Full node replacement
-			originalBytes, err := parser.SerializeToJSON(originalNode)
-			if err != nil {
-				return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
-			}
-			var originalJSON any
-			if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
-				return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
-			}
-			rewrite["original"] = originalJSON
-		} else if term, ok := originalNode.(*ast.Term); ok && ast.IsTermNode(node) {
-			// Check which attributes changed
-			newTerm := node.(*ast.Term)
-			if term.Foundry != newTerm.Foundry {
-				rewrite["scope"] = "foundry"
-				rewrite["original"] = term.Foundry
-			} else if term.Layer != newTerm.Layer {
-				rewrite["scope"] = "layer"
-				rewrite["original"] = term.Layer
-			} else if term.Key != newTerm.Key {
-				rewrite["scope"] = "key"
-				rewrite["original"] = term.Key
-			} else if term.Value != newTerm.Value {
-				rewrite["scope"] = "value"
-				rewrite["original"] = term.Value
-			} else {
-				// No specific attribute changed, use full node replacement
-				originalBytes, err := parser.SerializeToJSON(originalNode)
-				if err != nil {
-					return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
-				}
-				var originalJSON any
-				if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
-					return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
-				}
-				rewrite["original"] = originalJSON
-			}
-		} else {
-			// Full node replacement
-			originalBytes, err := parser.SerializeToJSON(originalNode)
-			if err != nil {
-				return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
-			}
-			var originalJSON any
-			if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
-				return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
-			}
-			rewrite["original"] = originalJSON
-		}
-
-		// Add rewrite to the node
-		if resultMap, ok := resultData.(map[string]any); ok {
-			if wrapMap, ok := resultMap["wrap"].(map[string]any); ok {
-				rewrites, exists := wrapMap["rewrites"]
-				if !exists {
-					rewrites = []any{}
-				}
-				if rewritesList, ok := rewrites.([]any); ok {
-					wrapMap["rewrites"] = append(rewritesList, rewrite)
-				} else {
-					wrapMap["rewrites"] = []any{rewrite}
-				}
-			}
-		}
-	}
-
-	// Restore rewrites if they existed
-	if oldRewrites != nil {
-		// Process old rewrites through AST to ensure backward compatibility
-		if rewritesList, ok := oldRewrites.([]any); ok {
-			processedRewrites := make([]any, len(rewritesList))
-			for i, rewriteData := range rewritesList {
-				// Marshal and unmarshal each rewrite to apply backward compatibility
-				rewriteBytes, err := json.Marshal(rewriteData)
-				if err != nil {
-					return nil, fmt.Errorf("failed to marshal old rewrite %d: %w", i, err)
-				}
-				var rewrite ast.Rewrite
-				if err := json.Unmarshal(rewriteBytes, &rewrite); err != nil {
-					return nil, fmt.Errorf("failed to unmarshal old rewrite %d: %w", i, err)
-				}
-				// Marshal back to get the transformed version
-				transformedBytes, err := json.Marshal(&rewrite)
-				if err != nil {
-					return nil, fmt.Errorf("failed to marshal transformed rewrite %d: %w", i, err)
-				}
-				var transformedRewrite any
-				if err := json.Unmarshal(transformedBytes, &transformedRewrite); err != nil {
-					return nil, fmt.Errorf("failed to unmarshal transformed rewrite %d: %w", i, err)
-				}
-				processedRewrites[i] = transformedRewrite
-			}
-			if resultMap, ok := resultData.(map[string]any); ok {
-				resultMap["rewrites"] = processedRewrites
-			}
-		} else {
-			// If it's not a list, restore as-is
-			if resultMap, ok := resultData.(map[string]any); ok {
-				resultMap["rewrites"] = oldRewrites
-			}
-		}
-	}
-
-	// If we had a query wrapper, put the transformed data back in it
-	if hasQueryWrapper {
-		if wrapper, ok := jsonData.(map[string]any); ok {
-			wrapper["query"] = resultData
-			return wrapper, nil
-		}
-	}
-
-	return resultData, nil
-}
-
-// isValidQueryObject checks if the query data is a valid object that can be processed
-func isValidQueryObject(data any) bool {
-	// Check if it's a map
-	queryMap, ok := data.(map[string]any)
-	if !ok {
-		return false
-	}
-
-	// Check if it has the required @type field
-	if _, ok := queryMap["@type"]; !ok {
-		return false
-	}
-
-	return true
-}
-
-// couldPatternMatch performs a lightweight check to see if a pattern could potentially match a node
-// This is an optimization to avoid expensive operations when there's clearly no match possible
-func (m *Mapper) couldPatternMatch(node, pattern ast.Node) bool {
-	if pattern == nil {
-		return true
-	}
-	if node == nil {
-		return false
-	}
-
-	// Handle Token wrappers
-	if token, ok := pattern.(*ast.Token); ok {
-		pattern = token.Wrap
-	}
-	if token, ok := node.(*ast.Token); ok {
-		node = token.Wrap
-	}
-
-	// For simple terms, check basic compatibility
-	if patternTerm, ok := pattern.(*ast.Term); ok {
-		// Check if there's any term in the node structure that could match
-		return m.hasMatchingTerm(node, patternTerm)
-	}
-
-	// For TermGroups, we need to check all possible matches
-	if patternGroup, ok := pattern.(*ast.TermGroup); ok {
-		if patternGroup.Relation == ast.OrRelation {
-			// For OR relations, any operand could match
-			for _, op := range patternGroup.Operands {
-				if m.couldPatternMatch(node, op) {
-					return true
-				}
-			}
-			return false
-		} else {
-			// For AND relations, all operands must have potential matches
-			for _, op := range patternGroup.Operands {
-				if !m.couldPatternMatch(node, op) {
-					return false
-				}
-			}
-			return true
-		}
-	}
-
-	// For other cases, assume they could match (conservative approach)
-	return true
-}
-
-// hasMatchingTerm checks if there's any term in the node structure that could match the pattern term
-func (m *Mapper) hasMatchingTerm(node ast.Node, patternTerm *ast.Term) bool {
-	if node == nil {
-		return false
-	}
-
-	switch n := node.(type) {
-	case *ast.Term:
-		// Check if this term could match the pattern
-		// We only check key as that's the most distinctive attribute
-		return n.Key == patternTerm.Key
-	case *ast.TermGroup:
-		// Check all operands
-		for _, op := range n.Operands {
-			if m.hasMatchingTerm(op, patternTerm) {
-				return true
-			}
-		}
-		return false
-	case *ast.Token:
-		return m.hasMatchingTerm(n.Wrap, patternTerm)
-	case *ast.CatchallNode:
-		if n.Wrap != nil && m.hasMatchingTerm(n.Wrap, patternTerm) {
-			return true
-		}
-		for _, op := range n.Operands {
-			if m.hasMatchingTerm(op, patternTerm) {
-				return true
-			}
-		}
-		return false
-	default:
-		return false
-	}
-}
diff --git a/mapper/query.go b/mapper/query.go
new file mode 100644
index 0000000..81fddba
--- /dev/null
+++ b/mapper/query.go
@@ -0,0 +1,414 @@
+package mapper
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/KorAP/KoralPipe-TermMapper/ast"
+	"github.com/KorAP/KoralPipe-TermMapper/matcher"
+	"github.com/KorAP/KoralPipe-TermMapper/parser"
+)
+
+// ApplyQueryMappings applies the specified mapping rules to a JSON object
+func (m *Mapper) ApplyQueryMappings(mappingID string, opts MappingOptions, jsonData any) (any, error) {
+	// Validate mapping ID
+	if _, exists := m.mappingLists[mappingID]; !exists {
+		return nil, fmt.Errorf("mapping list with ID %s not found", mappingID)
+	}
+
+	// Get the parsed rules
+	rules := m.parsedRules[mappingID]
+
+	// Check if we have a wrapper object with a "query" field
+	var queryData any
+	var hasQueryWrapper bool
+
+	if jsonMap, ok := jsonData.(map[string]any); ok {
+		if query, exists := jsonMap["query"]; exists {
+			queryData = query
+			hasQueryWrapper = true
+		}
+	}
+
+	// If no query wrapper was found, use the entire input
+	if !hasQueryWrapper {
+		// If the input itself is not a valid query object, return it as is
+		if !isValidQueryObject(jsonData) {
+			return jsonData, nil
+		}
+		queryData = jsonData
+	} else if queryData == nil || !isValidQueryObject(queryData) {
+		// If we have a query wrapper but the query is nil or not a valid object,
+		// return the original data
+		return jsonData, nil
+	}
+
+	// Store rewrites if they exist
+	var oldRewrites any
+	if queryMap, ok := queryData.(map[string]any); ok {
+		if rewrites, exists := queryMap["rewrites"]; exists {
+			oldRewrites = rewrites
+			delete(queryMap, "rewrites")
+		}
+	}
+
+	// Convert input JSON to AST
+	jsonBytes, err := json.Marshal(queryData)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal input JSON: %w", err)
+	}
+
+	node, err := parser.ParseJSON(jsonBytes)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse JSON into AST: %w", err)
+	}
+
+	// Store whether the input was a Token
+	isToken := false
+	var tokenWrap ast.Node
+	if token, ok := node.(*ast.Token); ok {
+		isToken = true
+		tokenWrap = token.Wrap
+		node = tokenWrap
+	}
+
+	// Store original node for rewrite if needed
+	var originalNode ast.Node
+	if opts.AddRewrites {
+		originalNode = node.Clone()
+	}
+
+	// Pre-check foundry/layer overrides to optimize processing
+	var patternFoundry, patternLayer, replacementFoundry, replacementLayer string
+	if opts.Direction { // true means AtoB
+		patternFoundry, patternLayer = opts.FoundryA, opts.LayerA
+		replacementFoundry, replacementLayer = opts.FoundryB, opts.LayerB
+	} else {
+		patternFoundry, patternLayer = opts.FoundryB, opts.LayerB
+		replacementFoundry, replacementLayer = opts.FoundryA, opts.LayerA
+	}
+
+	// Create a pattern cache key for memoization
+	type patternCacheKey struct {
+		ruleIndex     int
+		foundry       string
+		layer         string
+		isReplacement bool
+	}
+	patternCache := make(map[patternCacheKey]ast.Node)
+
+	// Apply each rule to the AST
+	for i, rule := range rules {
+		// Create pattern and replacement based on direction
+		var pattern, replacement ast.Node
+		if opts.Direction { // true means AtoB
+			pattern = rule.Upper
+			replacement = rule.Lower
+		} else {
+			pattern = rule.Lower
+			replacement = rule.Upper
+		}
+
+		// Extract the inner nodes from the pattern and replacement tokens
+		if token, ok := pattern.(*ast.Token); ok {
+			pattern = token.Wrap
+		}
+		if token, ok := replacement.(*ast.Token); ok {
+			replacement = token.Wrap
+		}
+
+		// First, quickly check if the pattern could match without creating a full matcher
+		// This is a lightweight pre-check to avoid expensive operations
+		if !m.couldPatternMatch(node, pattern) {
+			continue
+		}
+
+		// Get or create pattern with overrides
+		patternKey := patternCacheKey{ruleIndex: i, foundry: patternFoundry, layer: patternLayer, isReplacement: false}
+		processedPattern, exists := patternCache[patternKey]
+		if !exists {
+			// Clone pattern only when needed
+			processedPattern = pattern.Clone()
+			// Apply foundry and layer overrides only if they're non-empty
+			if patternFoundry != "" || patternLayer != "" {
+				ast.ApplyFoundryAndLayerOverrides(processedPattern, patternFoundry, patternLayer)
+			}
+			patternCache[patternKey] = processedPattern
+		}
+
+		// Create a temporary matcher to check for actual matches
+		tempMatcher, err := matcher.NewMatcher(ast.Pattern{Root: processedPattern}, ast.Replacement{Root: &ast.Term{}})
+		if err != nil {
+			return nil, fmt.Errorf("failed to create temporary matcher: %w", err)
+		}
+
+		// Only proceed if there's an actual match
+		if !tempMatcher.Match(node) {
+			continue
+		}
+
+		// Get or create replacement with overrides (lazy evaluation)
+		replacementKey := patternCacheKey{ruleIndex: i, foundry: replacementFoundry, layer: replacementLayer, isReplacement: true}
+		processedReplacement, exists := patternCache[replacementKey]
+		if !exists {
+			// Clone replacement only when we have a match
+			processedReplacement = replacement.Clone()
+			// Apply foundry and layer overrides only if they're non-empty
+			if replacementFoundry != "" || replacementLayer != "" {
+				ast.ApplyFoundryAndLayerOverrides(processedReplacement, replacementFoundry, replacementLayer)
+			}
+			patternCache[replacementKey] = processedReplacement
+		}
+
+		// Create the actual matcher and apply replacement
+		actualMatcher, err := matcher.NewMatcher(ast.Pattern{Root: processedPattern}, ast.Replacement{Root: processedReplacement})
+		if err != nil {
+			return nil, fmt.Errorf("failed to create matcher: %w", err)
+		}
+		node = actualMatcher.Replace(node)
+	}
+
+	// Wrap the result in a token if the input was a token
+	var result ast.Node
+	if isToken {
+		result = &ast.Token{Wrap: node}
+	} else {
+		result = node
+	}
+
+	// Convert AST back to JSON
+	resultBytes, err := parser.SerializeToJSON(result)
+	if err != nil {
+		return nil, fmt.Errorf("failed to serialize AST to JSON: %w", err)
+	}
+
+	// Parse the JSON string back into a generic structure
+	var resultData any
+	if err := json.Unmarshal(resultBytes, &resultData); err != nil {
+		return nil, fmt.Errorf("failed to parse result JSON: %w", err)
+	}
+
+	// Add rewrites if enabled and node was changed
+	if opts.AddRewrites && !ast.NodesEqual(node, originalNode) {
+		// Create rewrite object
+		rewrite := map[string]any{
+			"@type":  "koral:rewrite",
+			"editor": "termMapper",
+		}
+
+		// Check if the node types are different (structural change)
+		if originalNode.Type() != node.Type() {
+			// Full node replacement
+			originalBytes, err := parser.SerializeToJSON(originalNode)
+			if err != nil {
+				return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
+			}
+			var originalJSON any
+			if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
+				return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
+			}
+			rewrite["original"] = originalJSON
+		} else if term, ok := originalNode.(*ast.Term); ok && ast.IsTermNode(node) {
+			// Check which attributes changed
+			newTerm := node.(*ast.Term)
+			if term.Foundry != newTerm.Foundry {
+				rewrite["scope"] = "foundry"
+				rewrite["original"] = term.Foundry
+			} else if term.Layer != newTerm.Layer {
+				rewrite["scope"] = "layer"
+				rewrite["original"] = term.Layer
+			} else if term.Key != newTerm.Key {
+				rewrite["scope"] = "key"
+				rewrite["original"] = term.Key
+			} else if term.Value != newTerm.Value {
+				rewrite["scope"] = "value"
+				rewrite["original"] = term.Value
+			} else {
+				// No specific attribute changed, use full node replacement
+				originalBytes, err := parser.SerializeToJSON(originalNode)
+				if err != nil {
+					return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
+				}
+				var originalJSON any
+				if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
+					return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
+				}
+				rewrite["original"] = originalJSON
+			}
+		} else {
+			// Full node replacement
+			originalBytes, err := parser.SerializeToJSON(originalNode)
+			if err != nil {
+				return nil, fmt.Errorf("failed to serialize original node for rewrite: %w", err)
+			}
+			var originalJSON any
+			if err := json.Unmarshal(originalBytes, &originalJSON); err != nil {
+				return nil, fmt.Errorf("failed to parse original node JSON for rewrite: %w", err)
+			}
+			rewrite["original"] = originalJSON
+		}
+
+		// Add rewrite to the node
+		if resultMap, ok := resultData.(map[string]any); ok {
+			if wrapMap, ok := resultMap["wrap"].(map[string]any); ok {
+				rewrites, exists := wrapMap["rewrites"]
+				if !exists {
+					rewrites = []any{}
+				}
+				if rewritesList, ok := rewrites.([]any); ok {
+					wrapMap["rewrites"] = append(rewritesList, rewrite)
+				} else {
+					wrapMap["rewrites"] = []any{rewrite}
+				}
+			}
+		}
+	}
+
+	// Restore rewrites if they existed
+	if oldRewrites != nil {
+		// Process old rewrites through AST to ensure backward compatibility
+		if rewritesList, ok := oldRewrites.([]any); ok {
+			processedRewrites := make([]any, len(rewritesList))
+			for i, rewriteData := range rewritesList {
+				// Marshal and unmarshal each rewrite to apply backward compatibility
+				rewriteBytes, err := json.Marshal(rewriteData)
+				if err != nil {
+					return nil, fmt.Errorf("failed to marshal old rewrite %d: %w", i, err)
+				}
+				var rewrite ast.Rewrite
+				if err := json.Unmarshal(rewriteBytes, &rewrite); err != nil {
+					return nil, fmt.Errorf("failed to unmarshal old rewrite %d: %w", i, err)
+				}
+				// Marshal back to get the transformed version
+				transformedBytes, err := json.Marshal(&rewrite)
+				if err != nil {
+					return nil, fmt.Errorf("failed to marshal transformed rewrite %d: %w", i, err)
+				}
+				var transformedRewrite any
+				if err := json.Unmarshal(transformedBytes, &transformedRewrite); err != nil {
+					return nil, fmt.Errorf("failed to unmarshal transformed rewrite %d: %w", i, err)
+				}
+				processedRewrites[i] = transformedRewrite
+			}
+			if resultMap, ok := resultData.(map[string]any); ok {
+				resultMap["rewrites"] = processedRewrites
+			}
+		} else {
+			// If it's not a list, restore as-is
+			if resultMap, ok := resultData.(map[string]any); ok {
+				resultMap["rewrites"] = oldRewrites
+			}
+		}
+	}
+
+	// If we had a query wrapper, put the transformed data back in it
+	if hasQueryWrapper {
+		if wrapper, ok := jsonData.(map[string]any); ok {
+			wrapper["query"] = resultData
+			return wrapper, nil
+		}
+	}
+
+	return resultData, nil
+}
+
+// isValidQueryObject checks if the query data is a valid object that can be processed
+func isValidQueryObject(data any) bool {
+	// Check if it's a map
+	queryMap, ok := data.(map[string]any)
+	if !ok {
+		return false
+	}
+
+	// Check if it has the required @type field
+	if _, ok := queryMap["@type"]; !ok {
+		return false
+	}
+
+	return true
+}
+
+// couldPatternMatch performs a lightweight check to see if a pattern could potentially match a node
+// This is an optimization to avoid expensive operations when there's clearly no match possible
+func (m *Mapper) couldPatternMatch(node, pattern ast.Node) bool {
+	if pattern == nil {
+		return true
+	}
+	if node == nil {
+		return false
+	}
+
+	// Handle Token wrappers
+	if token, ok := pattern.(*ast.Token); ok {
+		pattern = token.Wrap
+	}
+	if token, ok := node.(*ast.Token); ok {
+		node = token.Wrap
+	}
+
+	// For simple terms, check basic compatibility
+	if patternTerm, ok := pattern.(*ast.Term); ok {
+		// Check if there's any term in the node structure that could match
+		return m.hasMatchingTerm(node, patternTerm)
+	}
+
+	// For TermGroups, we need to check all possible matches
+	if patternGroup, ok := pattern.(*ast.TermGroup); ok {
+		if patternGroup.Relation == ast.OrRelation {
+			// For OR relations, any operand could match
+			for _, op := range patternGroup.Operands {
+				if m.couldPatternMatch(node, op) {
+					return true
+				}
+			}
+			return false
+		} else {
+			// For AND relations, all operands must have potential matches
+			for _, op := range patternGroup.Operands {
+				if !m.couldPatternMatch(node, op) {
+					return false
+				}
+			}
+			return true
+		}
+	}
+
+	// For other cases, assume they could match (conservative approach)
+	return true
+}
+
+// hasMatchingTerm checks if there's any term in the node structure that could match the pattern term
+func (m *Mapper) hasMatchingTerm(node ast.Node, patternTerm *ast.Term) bool {
+	if node == nil {
+		return false
+	}
+
+	switch n := node.(type) {
+	case *ast.Term:
+		// Check if this term could match the pattern
+		// We only check key as that's the most distinctive attribute
+		return n.Key == patternTerm.Key
+	case *ast.TermGroup:
+		// Check all operands
+		for _, op := range n.Operands {
+			if m.hasMatchingTerm(op, patternTerm) {
+				return true
+			}
+		}
+		return false
+	case *ast.Token:
+		return m.hasMatchingTerm(n.Wrap, patternTerm)
+	case *ast.CatchallNode:
+		if n.Wrap != nil && m.hasMatchingTerm(n.Wrap, patternTerm) {
+			return true
+		}
+		for _, op := range n.Operands {
+			if m.hasMatchingTerm(op, patternTerm) {
+				return true
+			}
+		}
+		return false
+	default:
+		return false
+	}
+}
diff --git a/mapper/response.go b/mapper/response.go
index 99586f6..8fe1fc1 100644
--- a/mapper/response.go
+++ b/mapper/response.go
@@ -66,13 +66,18 @@
 		}
 
 		// If foundry/layer are empty in options, get them from the mapping list
-		if replacementFoundry == "" || replacementLayer == "" {
-			mappingList := m.mappingLists[mappingID]
+		mappingList := m.mappingLists[mappingID]
+		if replacementFoundry == "" {
 			if opts.Direction { // AtoB
 				replacementFoundry = mappingList.FoundryB
-				replacementLayer = mappingList.LayerB
 			} else {
 				replacementFoundry = mappingList.FoundryA
+			}
+		}
+		if replacementLayer == "" {
+			if opts.Direction { // AtoB
+				replacementLayer = mappingList.LayerB
+			} else {
 				replacementLayer = mappingList.LayerA
 			}
 		}
diff --git a/mapper/response_test.go b/mapper/response_test.go
index b7ffbdc..67674d7 100644
--- a/mapper/response_test.go
+++ b/mapper/response_test.go
@@ -927,7 +927,8 @@
 func TestResponseMappingNestedSpans(t *testing.T) {
 	// Snippet with deeply nested spans
 	responseSnippet := `{
-		"snippet": "<span title=\"level1/l:outer\"><span title=\"level2/l:middle\"><span title=\"marmot/p:DET\">der</span></span></span>"
+		"snippet": "<span title=\"level1/l:outer\"><span title=\"level2/l:middle\"><span title=\"marmot/p:DET\">der</span></span></span>",
+		"author": "John Doe"
 	}`
 
 	// Create test mapping list
@@ -965,4 +966,7 @@
 	assert.Contains(t, snippet, `title="level1/l:outer"`)
 	assert.Contains(t, snippet, `title="level2/l:middle"`)
 	assert.Contains(t, snippet, `title="marmot/p:DET"`)
+
+	author := resultMap["author"].(string)
+	assert.Equal(t, "John Doe", author)
 }