Implemented ordering preservation

This commit is contained in:
Martino Ferrari
2026-01-23 10:23:02 +01:00
parent e2c87c90f3
commit 5c3f05a1a4
18 changed files with 262 additions and 279 deletions

View File

@@ -71,86 +71,38 @@ func (b *Builder) writeNodeContent(f *os.File, node *index.ProjectNode, indent i
indentStr := strings.Repeat(" ", indent) indentStr := strings.Repeat(" ", indent)
// If this node has a RealName (e.g. +App), we print it as an object definition // If this node has a RealName (e.g. +App), we print it as an object definition
// UNLESS it is the top-level output file itself?
// If we are writing "App.marte", maybe we are writing the *body* of App?
// Spec: "unifying multi-file project into a single configuration output"
// Let's assume we print the Node itself.
if node.RealName != "" { if node.RealName != "" {
fmt.Fprintf(f, "%s%s = {\n", indentStr, node.RealName) fmt.Fprintf(f, "%s%s = {\n", indentStr, node.RealName)
indent++ indent++
indentStr = strings.Repeat(" ", indent) indentStr = strings.Repeat(" ", indent)
} }
writtenChildren := make(map[string]bool)
// 2. Write definitions from fragments // 2. Write definitions from fragments
for _, frag := range node.Fragments { for _, frag := range node.Fragments {
// Use formatter logic to print definitions
// We need a temporary Config to use Formatter?
// Or just reimplement basic printing? Formatter is better.
// But Formatter prints to io.Writer.
// We can reuse formatDefinition logic if we exposed it, or just copy basic logic.
// Since we need to respect indentation, using Formatter.Format might be tricky
// unless we wrap definitions in a dummy structure.
for _, def := range frag.Definitions { for _, def := range frag.Definitions {
// Basic formatting for now, referencing formatter style switch d := def.(type) {
b.writeDefinition(f, def, indent) case *parser.Field:
b.writeDefinition(f, d, indent)
case *parser.ObjectNode:
norm := index.NormalizeName(d.Name)
if child, ok := node.Children[norm]; ok {
if !writtenChildren[norm] {
b.writeNodeContent(f, child, indent)
writtenChildren[norm] = true
}
}
}
} }
} }
// 3. Write Children (recursively) // 3. Write Children (recursively)
// Children are sub-nodes defined implicitly via #package A.B or explicitly +Sub
// Explicit +Sub are handled via Fragments logic (they are definitions in fragments).
// Implicit nodes (from #package A.B.C where B was never explicitly defined)
// show up in Children map but maybe not in Fragments?
// If a Child is NOT in fragments (implicit), we still need to write it.
// If it IS in fragments (explicit +Child), it was handled in loop above?
// Wait. My Indexer puts `+Sub` into `node.Children["Sub"]` AND adds a `Fragment` to `node` containing `+Sub` object?
// Let's check Indexer.
// Case ObjectNode:
// Adds Fragment to `child` (the Sub node).
// Does NOT add `ObjectNode` definition to `node`'s fragment list?
// "pt.addObjectFragment(child...)"
// It does NOT add to `fileFragment.Definitions`.
// So `node.Fragments` only contains Fields!
// Children are all in `node.Children`.
// So:
// 1. Write Fields (from Fragments).
// 2. Write Children (from Children map).
// But wait, Fragments might have order?
// "Relative ordering within a file is preserved."
// My Indexer splits Fields and Objects.
// Fields go to Fragments. Objects go to Children.
// This loses the relative order between Fields and Objects in the source file!
// Correct Indexer approach for preserving order:
// `Fragment` should contain a list of `Entry`.
// `Entry` can be `Field` OR `ChildNodeName`.
// But I just rewrote Indexer to split them.
// If strict order is required "within a file", my Indexer is slightly lossy regarding Field vs Object order.
// Spec: "Relative ordering within a file is preserved."
// To fix this without another full rewrite:
// Iterating `node.Children` alphabetically is arbitrary.
// We should ideally iterate them in the order they appear.
// For now, I will proceed with writing Children after Fields, which is a common convention,
// unless strict interleaving is required.
// Given "Class first" rule, reordering happens anyway.
// Sorting Children?
// Maybe keep a list of OrderedChildren in ProjectNode?
sortedChildren := make([]string, 0, len(node.Children)) sortedChildren := make([]string, 0, len(node.Children))
for k := range node.Children { for k := range node.Children {
sortedChildren = append(sortedChildren, k) if !writtenChildren[k] {
sortedChildren = append(sortedChildren, k)
}
} }
sort.Strings(sortedChildren) // Alphabetical for determinism sort.Strings(sortedChildren) // Alphabetical for determinism

View File

@@ -222,6 +222,7 @@ func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *pars
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
case *parser.ObjectNode: case *parser.ObjectNode:
fileFragment.Definitions = append(fileFragment.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok { if _, ok := node.Children[norm]; !ok {
node.Children[norm] = &ProjectNode{ node.Children[norm] = &ProjectNode{
@@ -276,6 +277,7 @@ func (pt *ProjectTree) addObjectFragment(node *ProjectNode, file string, obj *pa
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
pt.extractFieldMetadata(node, d) pt.extractFieldMetadata(node, d)
case *parser.ObjectNode: case *parser.ObjectNode:
frag.Definitions = append(frag.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok { if _, ok := node.Children[norm]; !ok {
node.Children[norm] = &ProjectNode{ node.Children[norm] = &ProjectNode{
@@ -390,25 +392,65 @@ func (pt *ProjectTree) ResolveReferences() {
for i := range pt.References { for i := range pt.References {
ref := &pt.References[i] ref := &pt.References[i]
if isoNode, ok := pt.IsolatedFiles[ref.File]; ok { if isoNode, ok := pt.IsolatedFiles[ref.File]; ok {
ref.Target = pt.findNode(isoNode, ref.Name) ref.Target = pt.FindNode(isoNode, ref.Name, nil)
} else { } else {
ref.Target = pt.findNode(pt.Root, ref.Name) ref.Target = pt.FindNode(pt.Root, ref.Name, nil)
} }
} }
} }
func (pt *ProjectTree) findNode(root *ProjectNode, name string) *ProjectNode { func (pt *ProjectTree) FindNode(root *ProjectNode, name string, predicate func(*ProjectNode) bool) *ProjectNode {
if strings.Contains(name, ".") {
parts := strings.Split(name, ".")
rootName := parts[0]
var candidates []*ProjectNode
pt.findAllNodes(root, rootName, &candidates)
for _, cand := range candidates {
curr := cand
valid := true
for i := 1; i < len(parts); i++ {
nextName := parts[i]
normNext := NormalizeName(nextName)
if child, ok := curr.Children[normNext]; ok {
curr = child
} else {
valid = false
break
}
}
if valid {
if predicate == nil || predicate(curr) {
return curr
}
}
}
return nil
}
if root.RealName == name || root.Name == name { if root.RealName == name || root.Name == name {
return root if predicate == nil || predicate(root) {
return root
}
} }
for _, child := range root.Children { for _, child := range root.Children {
if res := pt.findNode(child, name); res != nil { if res := pt.FindNode(child, name, predicate); res != nil {
return res return res
} }
} }
return nil return nil
} }
// findAllNodes appends to results every node in the subtree rooted at root
// whose RealName or Name matches name. Name is also compared against the
// normalized query so that root-segment matching behaves like the child-map
// lookups in FindNode, which key on normalized names (assumes NormalizeName
// is idempotent — TODO confirm).
func (pt *ProjectTree) findAllNodes(root *ProjectNode, name string, results *[]*ProjectNode) {
	if root.RealName == name || root.Name == name || root.Name == NormalizeName(name) {
		*results = append(*results, root)
	}
	for _, child := range root.Children {
		pt.findAllNodes(child, name, results)
	}
}
type QueryResult struct { type QueryResult struct {
Node *ProjectNode Node *ProjectNode
Field *parser.Field Field *parser.Field

View File

@@ -173,38 +173,38 @@ func TestHandleReferences(t *testing.T) {
// TestLSPFormatting checks that handleFormatting returns exactly one
// whole-document edit whose text rewrites "+Object={" / "Field=1" input
// into the spaced "+Object = {" / "Field = 1" form, comparing modulo
// surrounding whitespace and CRLF/LF differences.
// NOTE(review): this span is a side-by-side diff rendering with the old and
// new columns fused on each line; left untouched because the expected
// string's original indentation cannot be recovered from this view.
func TestLSPFormatting(t *testing.T) { func TestLSPFormatting(t *testing.T) {
// Setup // Setup
content := ` content := `
#package Proj.Main #package Proj.Main
+Object={ +Object={
Field=1 Field=1
} }
` `
uri := "file:///test.marte" uri := "file:///test.marte"
// Open (populate documents map) // Open (populate documents map)
documents[uri] = content documents[uri] = content
// Format // Format
params := DocumentFormattingParams{ params := DocumentFormattingParams{
TextDocument: TextDocumentIdentifier{URI: uri}, TextDocument: TextDocumentIdentifier{URI: uri},
} }
edits := handleFormatting(params) edits := handleFormatting(params)
if len(edits) != 1 { if len(edits) != 1 {
t.Fatalf("Expected 1 edit, got %d", len(edits)) t.Fatalf("Expected 1 edit, got %d", len(edits))
} }
newText := edits[0].NewText newText := edits[0].NewText
expected := `#package Proj.Main expected := `#package Proj.Main
+Object = { +Object = {
Field = 1 Field = 1
} }
` `
// Normalize newlines for comparison just in case // Normalize newlines for comparison just in case
if strings.TrimSpace(strings.ReplaceAll(newText, "\r\n", "\n")) != strings.TrimSpace(strings.ReplaceAll(expected, "\r\n", "\n")) { if strings.TrimSpace(strings.ReplaceAll(newText, "\r\n", "\n")) != strings.TrimSpace(strings.ReplaceAll(expected, "\r\n", "\n")) {
t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", expected, newText) t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", expected, newText)
} }
} }

View File

@@ -204,12 +204,12 @@ func (p *Parser) parseSubnode() (Subnode, error) {
func (p *Parser) parseValue() (Value, error) { func (p *Parser) parseValue() (Value, error) {
tok := p.next() tok := p.next()
switch tok.Type { switch tok.Type {
case TokenString: case TokenString:
return &StringValue{ return &StringValue{
Position: tok.Position, Position: tok.Position,
Value: strings.Trim(tok.Value, "\""), Value: strings.Trim(tok.Value, "\""),
Quoted: true, Quoted: true,
}, nil }, nil
case TokenNumber: case TokenNumber:
// Simplistic handling // Simplistic handling

View File

@@ -509,7 +509,7 @@ func (v *Validator) getFieldValue(f *parser.Field) string {
func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode { func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
if isoNode, ok := v.Tree.IsolatedFiles[file]; ok { if isoNode, ok := v.Tree.IsolatedFiles[file]; ok {
if found := v.findNodeRecursive(isoNode, name, predicate); found != nil { if found := v.Tree.FindNode(isoNode, name, predicate); found != nil {
return found return found
} }
return nil return nil
@@ -517,24 +517,7 @@ func (v *Validator) resolveReference(name string, file string, predicate func(*i
if v.Tree.Root == nil { if v.Tree.Root == nil {
return nil return nil
} }
return v.findNodeRecursive(v.Tree.Root, name, predicate) return v.Tree.FindNode(v.Tree.Root, name, predicate)
}
func (v *Validator) findNodeRecursive(root *index.ProjectNode, name string, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
// Simple recursive search matching name
if root.RealName == name || root.Name == index.NormalizeName(name) {
if predicate == nil || predicate(root) {
return root
}
}
// Recursive
for _, child := range root.Children {
if found := v.findNodeRecursive(child, name, predicate); found != nil {
return found
}
}
return nil
} }
func (v *Validator) getNodeClass(node *index.ProjectNode) string { func (v *Validator) getNodeClass(node *index.ProjectNode) string {
@@ -554,7 +537,7 @@ func isValidType(t string) bool {
} }
func (v *Validator) checkType(val parser.Value, expectedType string) bool { func (v *Validator) checkType(val parser.Value, expectedType string) bool {
// ... (same as before) // ... (same as before)
switch expectedType { switch expectedType {
case "int": case "int":
_, ok := val.(*parser.IntValue) _, ok := val.(*parser.IntValue)

View File

@@ -52,13 +52,19 @@ func TestLSPSignalReferences(t *testing.T) {
// Traverse to MySig // Traverse to MySig
dataNode := root.Children["Data"] dataNode := root.Children["Data"]
if dataNode == nil { t.Fatal("Data node not found") } if dataNode == nil {
t.Fatal("Data node not found")
}
myDS := dataNode.Children["MyDS"] myDS := dataNode.Children["MyDS"]
if myDS == nil { t.Fatal("MyDS node not found") } if myDS == nil {
t.Fatal("MyDS node not found")
}
signals := myDS.Children["Signals"] signals := myDS.Children["Signals"]
if signals == nil { t.Fatal("Signals node not found") } if signals == nil {
t.Fatal("Signals node not found")
}
mySigDef := signals.Children["MySig"] mySigDef := signals.Children["MySig"]
if mySigDef == nil { if mySigDef == nil {

View File

@@ -49,13 +49,13 @@ func TestFunctionsArrayValidation(t *testing.T) {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
if strings.Contains(d.Message, "not found or is not a valid GAM") { if strings.Contains(d.Message, "not found or is not a valid GAM") {
// This covers both InvalidGAM and MissingGAM cases // This covers both InvalidGAM and MissingGAM cases
if strings.Contains(d.Message, "InvalidGAM") { if strings.Contains(d.Message, "InvalidGAM") {
foundInvalid = true foundInvalid = true
} }
if strings.Contains(d.Message, "MissingGAM") { if strings.Contains(d.Message, "MissingGAM") {
foundMissing = true foundMissing = true
} }
} }
if strings.Contains(d.Message, "must contain references") { if strings.Contains(d.Message, "must contain references") {
foundNotRef = true foundNotRef = true

View File

@@ -91,10 +91,10 @@ func TestGAMSignalValidation(t *testing.T) {
} }
if !foundBadInput || !foundMissing || !foundBadOutput { if !foundBadInput || !foundMissing || !foundBadOutput {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
t.Logf("Diagnostic: %s", d.Message) t.Logf("Diagnostic: %s", d.Message)
} }
} }
if !foundBadInput { if !foundBadInput {
t.Error("Expected error for OutDS in InputSignals") t.Error("Expected error for OutDS in InputSignals")

View File

@@ -22,22 +22,22 @@ func TestGlobalPragmaDebug(t *testing.T) {
t.Fatalf("Parse failed: %v", err) t.Fatalf("Parse failed: %v", err)
} }
// Check if pragma parsed // Check if pragma parsed
if len(config.Pragmas) == 0 { if len(config.Pragmas) == 0 {
t.Fatal("Pragma not parsed") t.Fatal("Pragma not parsed")
} }
t.Logf("Parsed Pragma 0: %s", config.Pragmas[0].Text) t.Logf("Parsed Pragma 0: %s", config.Pragmas[0].Text)
idx := index.NewProjectTree() idx := index.NewProjectTree()
idx.AddFile("debug.marte", config) idx.AddFile("debug.marte", config)
idx.ResolveReferences() idx.ResolveReferences()
// Check if added to GlobalPragmas // Check if added to GlobalPragmas
pragmas, ok := idx.GlobalPragmas["debug.marte"] pragmas, ok := idx.GlobalPragmas["debug.marte"]
if !ok || len(pragmas) == 0 { if !ok || len(pragmas) == 0 {
t.Fatal("GlobalPragmas not populated") t.Fatal("GlobalPragmas not populated")
} }
t.Logf("Global Pragma stored: %s", pragmas[0]) t.Logf("Global Pragma stored: %s", pragmas[0])
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
@@ -48,11 +48,11 @@ func TestGlobalPragmaDebug(t *testing.T) {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
if strings.Contains(d.Message, "Implicitly Defined Signal") { if strings.Contains(d.Message, "Implicitly Defined Signal") {
foundImplicitWarning = true foundImplicitWarning = true
t.Logf("Found warning: %s", d.Message) t.Logf("Found warning: %s", d.Message)
} }
if strings.Contains(d.Message, "Unused GAM") { if strings.Contains(d.Message, "Unused GAM") {
foundUnusedWarning = true foundUnusedWarning = true
t.Logf("Found warning: %s", d.Message) t.Logf("Found warning: %s", d.Message)
} }
} }

View File

@@ -64,10 +64,10 @@ func TestImplicitSignal(t *testing.T) {
} }
if !foundWarning || foundError { if !foundWarning || foundError {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
t.Logf("Diagnostic: %s", d.Message) t.Logf("Diagnostic: %s", d.Message)
} }
} }
if !foundWarning { if !foundWarning {
t.Error("Expected warning for ImplicitSig") t.Error("Expected warning for ImplicitSig")
@@ -83,9 +83,9 @@ func TestImplicitSignal(t *testing.T) {
` `
p2 := parser.NewParser(contentMissingType) p2 := parser.NewParser(contentMissingType)
config2, err2 := p2.Parse() config2, err2 := p2.Parse()
if err2 != nil { if err2 != nil {
t.Fatalf("Parse2 failed: %v", err2) t.Fatalf("Parse2 failed: %v", err2)
} }
idx2 := index.NewProjectTree() idx2 := index.NewProjectTree()
idx2.AddFile("missing_type.marte", config2) idx2.AddFile("missing_type.marte", config2)
idx2.ResolveReferences() idx2.ResolveReferences()
@@ -99,9 +99,9 @@ func TestImplicitSignal(t *testing.T) {
} }
} }
if !foundTypeErr { if !foundTypeErr {
for _, d := range v2.Diagnostics { for _, d := range v2.Diagnostics {
t.Logf("Diagnostic2: %s", d.Message) t.Logf("Diagnostic2: %s", d.Message)
} }
t.Error("Expected error for missing Type in implicit signal") t.Error("Expected error for missing Type in implicit signal")
} }
} }

View File

@@ -32,18 +32,18 @@ func TestMultiFileNodeValidation(t *testing.T) {
// Resolving references might be needed if the validator relies on it for merging implicitly // Resolving references might be needed if the validator relies on it for merging implicitly
// But primarily we want to check if the validator sees the merged node. // But primarily we want to check if the validator sees the merged node.
// The current implementation of Validator likely iterates over the ProjectTree. // The current implementation of Validator likely iterates over the ProjectTree.
// If the ProjectTree doesn't merge nodes automatically, the Validator needs to do it. // If the ProjectTree doesn't merge nodes automatically, the Validator needs to do it.
// However, the spec says "The build tool, validator, and LSP must merge these definitions". // However, the spec says "The build tool, validator, and LSP must merge these definitions".
// Let's assume the Validator or Index does the merging logic. // Let's assume the Validator or Index does the merging logic.
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
// +MyNode is split. // +MyNode is split.
// valid_1 has FieldA // valid_1 has FieldA
// valid_2 has Class and FieldB // valid_2 has Class and FieldB
// If merging works, it should have a Class, so no error about missing Class. // If merging works, it should have a Class, so no error about missing Class.
for _, diag := range v.Diagnostics { for _, diag := range v.Diagnostics {
if strings.Contains(diag.Message, "must contain a 'Class' field") { if strings.Contains(diag.Message, "must contain a 'Class' field") {
@@ -80,13 +80,13 @@ func TestMultiFileReference(t *testing.T) {
idx.ResolveReferences() idx.ResolveReferences()
// Check if the reference in +SourceNode to TargetNode is resolved. // Check if the reference in +SourceNode to TargetNode is resolved.
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
if len(v.Diagnostics) > 0 { if len(v.Diagnostics) > 0 {
// Filter out irrelevant errors // Filter out irrelevant errors
} }
} }
func TestHierarchicalPackageMerge(t *testing.T) { func TestHierarchicalPackageMerge(t *testing.T) {
@@ -154,43 +154,43 @@ func TestHierarchicalDuplicate(t *testing.T) {
// TestIsolatedFileValidation verifies reference isolation: a file without a
// #package directive ("iso.marte") is indexed as isolated, so its reference
// to SharedObj must NOT resolve to the +SharedObj defined in the packaged
// file "shared.marte" — after ResolveReferences the reference's Target must
// stay nil.
// NOTE(review): this span is a side-by-side diff rendering with the old and
// new columns fused on each line; left untouched because the fixture
// strings' original indentation cannot be recovered from this view.
func TestIsolatedFileValidation(t *testing.T) { func TestIsolatedFileValidation(t *testing.T) {
idx := index.NewProjectTree() idx := index.NewProjectTree()
// File 1: Has package. Defines SharedClass. // File 1: Has package. Defines SharedClass.
f1Content := ` f1Content := `
#package Proj.Pkg #package Proj.Pkg
+SharedObj = { Class = SharedClass } +SharedObj = { Class = SharedClass }
` `
p1 := parser.NewParser(f1Content) p1 := parser.NewParser(f1Content)
c1, _ := p1.Parse() c1, _ := p1.Parse()
idx.AddFile("shared.marte", c1) idx.AddFile("shared.marte", c1)
// File 2: No package. References SharedObj. // File 2: No package. References SharedObj.
// Should NOT resolve to SharedObj in shared.marte because iso.marte is isolated. // Should NOT resolve to SharedObj in shared.marte because iso.marte is isolated.
f2Content := ` f2Content := `
+IsoObj = { +IsoObj = {
Class = "MyClass" Class = "MyClass"
Ref = SharedObj Ref = SharedObj
} }
` `
p2 := parser.NewParser(f2Content) p2 := parser.NewParser(f2Content)
c2, _ := p2.Parse() c2, _ := p2.Parse()
idx.AddFile("iso.marte", c2) idx.AddFile("iso.marte", c2)
idx.ResolveReferences() idx.ResolveReferences()
// Find reference // Find reference
var ref *index.Reference var ref *index.Reference
for i := range idx.References { for i := range idx.References {
if idx.References[i].File == "iso.marte" && idx.References[i].Name == "SharedObj" { if idx.References[i].File == "iso.marte" && idx.References[i].Name == "SharedObj" {
ref = &idx.References[i] ref = &idx.References[i]
break break
} }
} }
if ref == nil { if ref == nil {
t.Fatal("Reference SharedObj not found in index") t.Fatal("Reference SharedObj not found in index")
} }
if ref.Target != nil { if ref.Target != nil {
t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File) t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File)
} }
} }