Implement ordering preservation

This commit is contained in:
Martino Ferrari
2026-01-23 10:23:02 +01:00
parent e2c87c90f3
commit 5c3f05a1a4
18 changed files with 262 additions and 279 deletions

View File

@@ -71,86 +71,38 @@ func (b *Builder) writeNodeContent(f *os.File, node *index.ProjectNode, indent i
indentStr := strings.Repeat(" ", indent) indentStr := strings.Repeat(" ", indent)
// If this node has a RealName (e.g. +App), we print it as an object definition // If this node has a RealName (e.g. +App), we print it as an object definition
// UNLESS it is the top-level output file itself?
// If we are writing "App.marte", maybe we are writing the *body* of App?
// Spec: "unifying multi-file project into a single configuration output"
// Let's assume we print the Node itself.
if node.RealName != "" { if node.RealName != "" {
fmt.Fprintf(f, "%s%s = {\n", indentStr, node.RealName) fmt.Fprintf(f, "%s%s = {\n", indentStr, node.RealName)
indent++ indent++
indentStr = strings.Repeat(" ", indent) indentStr = strings.Repeat(" ", indent)
} }
writtenChildren := make(map[string]bool)
// 2. Write definitions from fragments // 2. Write definitions from fragments
for _, frag := range node.Fragments { for _, frag := range node.Fragments {
// Use formatter logic to print definitions
// We need a temporary Config to use Formatter?
// Or just reimplement basic printing? Formatter is better.
// But Formatter prints to io.Writer.
// We can reuse formatDefinition logic if we exposed it, or just copy basic logic.
// Since we need to respect indentation, using Formatter.Format might be tricky
// unless we wrap definitions in a dummy structure.
for _, def := range frag.Definitions { for _, def := range frag.Definitions {
// Basic formatting for now, referencing formatter style switch d := def.(type) {
b.writeDefinition(f, def, indent) case *parser.Field:
b.writeDefinition(f, d, indent)
case *parser.ObjectNode:
norm := index.NormalizeName(d.Name)
if child, ok := node.Children[norm]; ok {
if !writtenChildren[norm] {
b.writeNodeContent(f, child, indent)
writtenChildren[norm] = true
}
}
}
} }
} }
// 3. Write Children (recursively) // 3. Write Children (recursively)
// Children are sub-nodes defined implicitly via #package A.B or explicitly +Sub
// Explicit +Sub are handled via Fragments logic (they are definitions in fragments).
// Implicit nodes (from #package A.B.C where B was never explicitly defined)
// show up in Children map but maybe not in Fragments?
// If a Child is NOT in fragments (implicit), we still need to write it.
// If it IS in fragments (explicit +Child), it was handled in loop above?
// Wait. My Indexer puts `+Sub` into `node.Children["Sub"]` AND adds a `Fragment` to `node` containing `+Sub` object?
// Let's check Indexer.
// Case ObjectNode:
// Adds Fragment to `child` (the Sub node).
// Does NOT add `ObjectNode` definition to `node`'s fragment list?
// "pt.addObjectFragment(child...)"
// It does NOT add to `fileFragment.Definitions`.
// So `node.Fragments` only contains Fields!
// Children are all in `node.Children`.
// So:
// 1. Write Fields (from Fragments).
// 2. Write Children (from Children map).
// But wait, Fragments might have order?
// "Relative ordering within a file is preserved."
// My Indexer splits Fields and Objects.
// Fields go to Fragments. Objects go to Children.
// This loses the relative order between Fields and Objects in the source file!
// Correct Indexer approach for preserving order:
// `Fragment` should contain a list of `Entry`.
// `Entry` can be `Field` OR `ChildNodeName`.
// But I just rewrote Indexer to split them.
// If strict order is required "within a file", my Indexer is slightly lossy regarding Field vs Object order.
// Spec: "Relative ordering within a file is preserved."
// To fix this without another full rewrite:
// Iterating `node.Children` alphabetically is arbitrary.
// We should ideally iterate them in the order they appear.
// For now, I will proceed with writing Children after Fields, which is a common convention,
// unless strict interleaving is required.
// Given "Class first" rule, reordering happens anyway.
// Sorting Children?
// Maybe keep a list of OrderedChildren in ProjectNode?
sortedChildren := make([]string, 0, len(node.Children)) sortedChildren := make([]string, 0, len(node.Children))
for k := range node.Children { for k := range node.Children {
sortedChildren = append(sortedChildren, k) if !writtenChildren[k] {
sortedChildren = append(sortedChildren, k)
}
} }
sort.Strings(sortedChildren) // Alphabetical for determinism sort.Strings(sortedChildren) // Alphabetical for determinism

View File

@@ -54,7 +54,7 @@ func fixComment(text string) string {
return "//# " + text[3:] return "//# " + text[3:]
} }
} else if strings.HasPrefix(text, "//") { } else if strings.HasPrefix(text, "//") {
if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' { if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' {
return "// " + text[2:] return "// " + text[2:]
} }
} }
@@ -101,7 +101,7 @@ func (f *Formatter) formatDefinition(def parser.Definition, indent int) int {
fmt.Fprintln(f.writer) fmt.Fprintln(f.writer)
f.formatSubnode(d.Subnode, indent+1) f.formatSubnode(d.Subnode, indent+1)
fmt.Fprintf(f.writer, "%s}", indentStr) fmt.Fprintf(f.writer, "%s}", indentStr)
return d.Subnode.EndPosition.Line return d.Subnode.EndPosition.Line
} }
@@ -175,7 +175,7 @@ func (f *Formatter) flushCommentsBefore(pos parser.Position, indent int, stick b
break break
} }
} }
// If stick is true, we don't print extra newline. // If stick is true, we don't print extra newline.
// The caller will print the definition immediately after this function returns. // The caller will print the definition immediately after this function returns.
// If stick is false (e.g. end of block comments), we act normally. // If stick is false (e.g. end of block comments), we act normally.
// But actually, the previous implementation didn't print extra newlines between comments and code // But actually, the previous implementation didn't print extra newlines between comments and code
@@ -208,4 +208,4 @@ func (f *Formatter) popComment() string {
c := f.insertables[f.cursor] c := f.insertables[f.cursor]
f.cursor++ f.cursor++
return c.Text return c.Text
} }

View File

@@ -222,6 +222,7 @@ func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *pars
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
case *parser.ObjectNode: case *parser.ObjectNode:
fileFragment.Definitions = append(fileFragment.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok { if _, ok := node.Children[norm]; !ok {
node.Children[norm] = &ProjectNode{ node.Children[norm] = &ProjectNode{
@@ -276,6 +277,7 @@ func (pt *ProjectTree) addObjectFragment(node *ProjectNode, file string, obj *pa
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
pt.extractFieldMetadata(node, d) pt.extractFieldMetadata(node, d)
case *parser.ObjectNode: case *parser.ObjectNode:
frag.Definitions = append(frag.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok { if _, ok := node.Children[norm]; !ok {
node.Children[norm] = &ProjectNode{ node.Children[norm] = &ProjectNode{
@@ -390,25 +392,65 @@ func (pt *ProjectTree) ResolveReferences() {
for i := range pt.References { for i := range pt.References {
ref := &pt.References[i] ref := &pt.References[i]
if isoNode, ok := pt.IsolatedFiles[ref.File]; ok { if isoNode, ok := pt.IsolatedFiles[ref.File]; ok {
ref.Target = pt.findNode(isoNode, ref.Name) ref.Target = pt.FindNode(isoNode, ref.Name, nil)
} else { } else {
ref.Target = pt.findNode(pt.Root, ref.Name) ref.Target = pt.FindNode(pt.Root, ref.Name, nil)
} }
} }
} }
func (pt *ProjectTree) findNode(root *ProjectNode, name string) *ProjectNode { func (pt *ProjectTree) FindNode(root *ProjectNode, name string, predicate func(*ProjectNode) bool) *ProjectNode {
if strings.Contains(name, ".") {
parts := strings.Split(name, ".")
rootName := parts[0]
var candidates []*ProjectNode
pt.findAllNodes(root, rootName, &candidates)
for _, cand := range candidates {
curr := cand
valid := true
for i := 1; i < len(parts); i++ {
nextName := parts[i]
normNext := NormalizeName(nextName)
if child, ok := curr.Children[normNext]; ok {
curr = child
} else {
valid = false
break
}
}
if valid {
if predicate == nil || predicate(curr) {
return curr
}
}
}
return nil
}
if root.RealName == name || root.Name == name { if root.RealName == name || root.Name == name {
return root if predicate == nil || predicate(root) {
return root
}
} }
for _, child := range root.Children { for _, child := range root.Children {
if res := pt.findNode(child, name); res != nil { if res := pt.FindNode(child, name, predicate); res != nil {
return res return res
} }
} }
return nil return nil
} }
func (pt *ProjectTree) findAllNodes(root *ProjectNode, name string, results *[]*ProjectNode) {
if root.RealName == name || root.Name == name {
*results = append(*results, root)
}
for _, child := range root.Children {
pt.findAllNodes(child, name, results)
}
}
type QueryResult struct { type QueryResult struct {
Node *ProjectNode Node *ProjectNode
Field *parser.Field Field *parser.Field

View File

@@ -30,7 +30,7 @@ func TestInitProjectScan(t *testing.T) {
// +Source = { Class = C Link = Target } // +Source = { Class = C Link = Target }
// 012345678901234567890123456789012345 // 012345678901234567890123456789012345
// Previous offset was 29. // Previous offset was 29.
// Now add 21? // Now add 21?
// #package Test.Common\n // #package Test.Common\n
// +Source = ... // +Source = ...
// So add 21 to Character? Or Line 1? // So add 21 to Character? Or Line 1?
@@ -84,7 +84,7 @@ func TestInitProjectScan(t *testing.T) {
func TestHandleDefinition(t *testing.T) { func TestHandleDefinition(t *testing.T) {
// Reset tree for test // Reset tree for test
tree = index.NewProjectTree() tree = index.NewProjectTree()
content := ` content := `
+MyObject = { +MyObject = {
Class = Type Class = Type
@@ -136,7 +136,7 @@ func TestHandleDefinition(t *testing.T) {
func TestHandleReferences(t *testing.T) { func TestHandleReferences(t *testing.T) {
// Reset tree for test // Reset tree for test
tree = index.NewProjectTree() tree = index.NewProjectTree()
content := ` content := `
+MyObject = { +MyObject = {
Class = Type Class = Type
@@ -173,38 +173,38 @@ func TestHandleReferences(t *testing.T) {
func TestLSPFormatting(t *testing.T) { func TestLSPFormatting(t *testing.T) {
// Setup // Setup
content := ` content := `
#package Proj.Main #package Proj.Main
+Object={ +Object={
Field=1 Field=1
} }
` `
uri := "file:///test.marte" uri := "file:///test.marte"
// Open (populate documents map) // Open (populate documents map)
documents[uri] = content documents[uri] = content
// Format // Format
params := DocumentFormattingParams{ params := DocumentFormattingParams{
TextDocument: TextDocumentIdentifier{URI: uri}, TextDocument: TextDocumentIdentifier{URI: uri},
} }
edits := handleFormatting(params) edits := handleFormatting(params)
if len(edits) != 1 { if len(edits) != 1 {
t.Fatalf("Expected 1 edit, got %d", len(edits)) t.Fatalf("Expected 1 edit, got %d", len(edits))
} }
newText := edits[0].NewText newText := edits[0].NewText
expected := `#package Proj.Main expected := `#package Proj.Main
+Object = { +Object = {
Field = 1 Field = 1
} }
` `
// Normalize newlines for comparison just in case // Normalize newlines for comparison just in case
if strings.TrimSpace(strings.ReplaceAll(newText, "\r\n", "\n")) != strings.TrimSpace(strings.ReplaceAll(expected, "\r\n", "\n")) { if strings.TrimSpace(strings.ReplaceAll(newText, "\r\n", "\n")) != strings.TrimSpace(strings.ReplaceAll(expected, "\r\n", "\n")) {
t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", expected, newText) t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", expected, newText)
} }
} }

View File

@@ -257,4 +257,4 @@ func (l *Lexer) lexPackage() Token {
return l.lexUntilNewline(TokenPackage) return l.lexUntilNewline(TokenPackage)
} }
return l.emit(TokenError) return l.emit(TokenError)
} }

View File

@@ -145,17 +145,17 @@ func (p *Parser) isSubnodeLookahead() bool {
// Look inside: // Look inside:
// peek(0) is '{' // peek(0) is '{'
// peek(1) is first token inside // peek(1) is first token inside
t1 := p.peekN(1) t1 := p.peekN(1)
if t1.Type == TokenRBrace { if t1.Type == TokenRBrace {
// {} -> Empty. Assume Array (Value) by default, unless forced? // {} -> Empty. Assume Array (Value) by default, unless forced?
// If we return false, it parses as ArrayValue. // If we return false, it parses as ArrayValue.
// If user writes "Sig = {}", is it an empty signal? // If user writes "Sig = {}", is it an empty signal?
// Empty array is more common for value. // Empty array is more common for value.
// If "Sig" is a node, it should probably have content or use +Sig. // If "Sig" is a node, it should probably have content or use +Sig.
return false return false
} }
if t1.Type == TokenIdentifier { if t1.Type == TokenIdentifier {
// Identifier inside. // Identifier inside.
// If followed by '=', it's a definition -> Subnode. // If followed by '=', it's a definition -> Subnode.
@@ -166,12 +166,12 @@ func (p *Parser) isSubnodeLookahead() bool {
// Identifier alone or followed by something else -> Reference/Value -> Array // Identifier alone or followed by something else -> Reference/Value -> Array
return false return false
} }
if t1.Type == TokenObjectIdentifier { if t1.Type == TokenObjectIdentifier {
// +Node = ... -> Definition -> Subnode // +Node = ... -> Definition -> Subnode
return true return true
} }
// Literals -> Array // Literals -> Array
return false return false
} }
@@ -204,13 +204,13 @@ func (p *Parser) parseSubnode() (Subnode, error) {
func (p *Parser) parseValue() (Value, error) { func (p *Parser) parseValue() (Value, error) {
tok := p.next() tok := p.next()
switch tok.Type { switch tok.Type {
case TokenString: case TokenString:
return &StringValue{ return &StringValue{
Position: tok.Position, Position: tok.Position,
Value: strings.Trim(tok.Value, "\""), Value: strings.Trim(tok.Value, "\""),
Quoted: true, Quoted: true,
}, nil }, nil
case TokenNumber: case TokenNumber:
// Simplistic handling // Simplistic handling
if strings.Contains(tok.Value, ".") || strings.Contains(tok.Value, "e") { if strings.Contains(tok.Value, ".") || strings.Contains(tok.Value, "e") {

View File

@@ -114,7 +114,7 @@ func LoadFullSchema(projectRoot string) *Schema {
sysPaths := []string{ sysPaths := []string{
"/usr/share/mdt/marte_schema.json", "/usr/share/mdt/marte_schema.json",
} }
home, err := os.UserHomeDir() home, err := os.UserHomeDir()
if err == nil { if err == nil {
sysPaths = append(sysPaths, filepath.Join(home, ".local/share/mdt/marte_schema.json")) sysPaths = append(sysPaths, filepath.Join(home, ".local/share/mdt/marte_schema.json"))
@@ -135,4 +135,4 @@ func LoadFullSchema(projectRoot string) *Schema {
} }
return s return s
} }

View File

@@ -343,7 +343,7 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
var targetNode *index.ProjectNode var targetNode *index.ProjectNode
if signalsContainer, ok := dsNode.Children["Signals"]; ok { if signalsContainer, ok := dsNode.Children["Signals"]; ok {
targetNorm := index.NormalizeName(targetSignalName) targetNorm := index.NormalizeName(targetSignalName)
if child, ok := signalsContainer.Children[targetNorm]; ok { if child, ok := signalsContainer.Children[targetNorm]; ok {
targetNode = child targetNode = child
} else { } else {
@@ -404,12 +404,12 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
v.updateReferenceTarget(v.getNodeFile(signalNode), val.Position, targetNode) v.updateReferenceTarget(v.getNodeFile(signalNode), val.Position, targetNode)
} }
} }
// Property checks // Property checks
v.checkSignalProperty(signalNode, targetNode, "Type") v.checkSignalProperty(signalNode, targetNode, "Type")
v.checkSignalProperty(signalNode, targetNode, "NumberOfElements") v.checkSignalProperty(signalNode, targetNode, "NumberOfElements")
v.checkSignalProperty(signalNode, targetNode, "NumberOfDimensions") v.checkSignalProperty(signalNode, targetNode, "NumberOfDimensions")
// Check Type validity if present // Check Type validity if present
if typeFields, ok := fields["Type"]; ok && len(typeFields) > 0 { if typeFields, ok := fields["Type"]; ok && len(typeFields) > 0 {
typeVal := v.getFieldValue(typeFields[0]) typeVal := v.getFieldValue(typeFields[0])
@@ -509,7 +509,7 @@ func (v *Validator) getFieldValue(f *parser.Field) string {
func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode { func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
if isoNode, ok := v.Tree.IsolatedFiles[file]; ok { if isoNode, ok := v.Tree.IsolatedFiles[file]; ok {
if found := v.findNodeRecursive(isoNode, name, predicate); found != nil { if found := v.Tree.FindNode(isoNode, name, predicate); found != nil {
return found return found
} }
return nil return nil
@@ -517,24 +517,7 @@ func (v *Validator) resolveReference(name string, file string, predicate func(*i
if v.Tree.Root == nil { if v.Tree.Root == nil {
return nil return nil
} }
return v.findNodeRecursive(v.Tree.Root, name, predicate) return v.Tree.FindNode(v.Tree.Root, name, predicate)
}
func (v *Validator) findNodeRecursive(root *index.ProjectNode, name string, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
// Simple recursive search matching name
if root.RealName == name || root.Name == index.NormalizeName(name) {
if predicate == nil || predicate(root) {
return root
}
}
// Recursive
for _, child := range root.Children {
if found := v.findNodeRecursive(child, name, predicate); found != nil {
return found
}
}
return nil
} }
func (v *Validator) getNodeClass(node *index.ProjectNode) string { func (v *Validator) getNodeClass(node *index.ProjectNode) string {
@@ -554,7 +537,7 @@ func isValidType(t string) bool {
} }
func (v *Validator) checkType(val parser.Value, expectedType string) bool { func (v *Validator) checkType(val parser.Value, expectedType string) bool {
// ... (same as before) // ... (same as before)
switch expectedType { switch expectedType {
case "int": case "int":
_, ok := val.(*parser.IntValue) _, ok := val.(*parser.IntValue)
@@ -780,4 +763,4 @@ func (v *Validator) isGloballyAllowed(warningType string, contextFile string) bo
} }
} }
return false return false
} }

View File

@@ -18,7 +18,7 @@ func TestMultiFileBuildMergeAndOrder(t *testing.T) {
// File 1: Has FieldA, no Class. // File 1: Has FieldA, no Class.
// File 2: Has Class, FieldB. // File 2: Has Class, FieldB.
// Both in package +MyObj // Both in package +MyObj
f1Content := ` f1Content := `
#package Proj.+MyObj #package Proj.+MyObj
FieldA = 10 FieldA = 10
@@ -30,10 +30,10 @@ FieldB = 20
` `
os.WriteFile("build_multi_test/f1.marte", []byte(f1Content), 0644) os.WriteFile("build_multi_test/f1.marte", []byte(f1Content), 0644)
os.WriteFile("build_multi_test/f2.marte", []byte(f2Content), 0644) os.WriteFile("build_multi_test/f2.marte", []byte(f2Content), 0644)
// Execute Build // Execute Build
b := builder.NewBuilder([]string{"build_multi_test/f1.marte", "build_multi_test/f2.marte"}) b := builder.NewBuilder([]string{"build_multi_test/f1.marte", "build_multi_test/f2.marte"})
// Prepare output file // Prepare output file
// Should be +MyObj.marte (normalized MyObj.marte) - Actually checking content // Should be +MyObj.marte (normalized MyObj.marte) - Actually checking content
outputFile := "build_multi_test/MyObj.marte" outputFile := "build_multi_test/MyObj.marte"
@@ -48,19 +48,19 @@ FieldB = 20
t.Fatalf("Build failed: %v", err) t.Fatalf("Build failed: %v", err)
} }
f.Close() // Close to flush f.Close() // Close to flush
// Check Output // Check Output
if _, err := os.Stat(outputFile); os.IsNotExist(err) { if _, err := os.Stat(outputFile); os.IsNotExist(err) {
t.Fatalf("Expected output file not found") t.Fatalf("Expected output file not found")
} }
content, err := os.ReadFile(outputFile) content, err := os.ReadFile(outputFile)
if err != nil { if err != nil {
t.Fatalf("Failed to read output: %v", err) t.Fatalf("Failed to read output: %v", err)
} }
output := string(content) output := string(content)
// Check presence // Check presence
if !strings.Contains(output, "Class = \"MyClass\"") { if !strings.Contains(output, "Class = \"MyClass\"") {
t.Error("Output missing Class") t.Error("Output missing Class")
@@ -71,23 +71,23 @@ FieldB = 20
if !strings.Contains(output, "FieldB = 20") { if !strings.Contains(output, "FieldB = 20") {
t.Error("Output missing FieldB") t.Error("Output missing FieldB")
} }
// Check Order: Class/FieldB (from f2) should come BEFORE FieldA (from f1) // Check Order: Class/FieldB (from f2) should come BEFORE FieldA (from f1)
// because f2 has the Class definition. // because f2 has the Class definition.
idxClass := strings.Index(output, "Class") idxClass := strings.Index(output, "Class")
idxFieldB := strings.Index(output, "FieldB") idxFieldB := strings.Index(output, "FieldB")
idxFieldA := strings.Index(output, "FieldA") idxFieldA := strings.Index(output, "FieldA")
if idxClass == -1 || idxFieldB == -1 || idxFieldA == -1 { if idxClass == -1 || idxFieldB == -1 || idxFieldA == -1 {
t.Fatal("Missing fields in output") t.Fatal("Missing fields in output")
} }
// Class should be first // Class should be first
if idxClass > idxFieldA { if idxClass > idxFieldA {
t.Errorf("Expected Class (from f2) to be before FieldA (from f1). Output:\n%s", output) t.Errorf("Expected Class (from f2) to be before FieldA (from f1). Output:\n%s", output)
} }
// FieldB should be near Class (same fragment) // FieldB should be near Class (same fragment)
// FieldA should be after // FieldA should be after
if idxFieldB > idxFieldA { if idxFieldB > idxFieldA {

View File

@@ -120,7 +120,7 @@ func TestFmtCommand(t *testing.T) {
formatter.Format(config, &buf) formatter.Format(config, &buf)
output := buf.String() output := buf.String()
// Check for indentation // Check for indentation
if !strings.Contains(output, " Class = \"MyClass\"") { if !strings.Contains(output, " Class = \"MyClass\"") {
t.Error("Expected 2-space indentation for Class field") t.Error("Expected 2-space indentation for Class field")
@@ -169,7 +169,7 @@ func TestBuildCommand(t *testing.T) {
// Test Merge // Test Merge
files := []string{"integration/build_merge_1.marte", "integration/build_merge_2.marte"} files := []string{"integration/build_merge_1.marte", "integration/build_merge_2.marte"}
b := builder.NewBuilder(files) b := builder.NewBuilder(files)
outputFile, err := os.Create("build_test/TEST.marte") outputFile, err := os.Create("build_test/TEST.marte")
if err != nil { if err != nil {
t.Fatalf("Failed to create output file: %v", err) t.Fatalf("Failed to create output file: %v", err)
@@ -180,23 +180,23 @@ func TestBuildCommand(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("Build failed: %v", err) t.Fatalf("Build failed: %v", err)
} }
// Check output existence // Check output existence
if _, err := os.Stat("build_test/TEST.marte"); os.IsNotExist(err) { if _, err := os.Stat("build_test/TEST.marte"); os.IsNotExist(err) {
t.Fatalf("Expected output file build_test/TEST.marte not found") t.Fatalf("Expected output file build_test/TEST.marte not found")
} }
content, _ := ioutil.ReadFile("build_test/TEST.marte") content, _ := ioutil.ReadFile("build_test/TEST.marte")
output := string(content) output := string(content)
if !strings.Contains(output, "FieldA = 1") || !strings.Contains(output, "FieldB = 2") { if !strings.Contains(output, "FieldA = 1") || !strings.Contains(output, "FieldB = 2") {
t.Error("Merged output missing fields") t.Error("Merged output missing fields")
} }
// Test Order (Class First) // Test Order (Class First)
filesOrder := []string{"integration/build_order_1.marte", "integration/build_order_2.marte"} filesOrder := []string{"integration/build_order_1.marte", "integration/build_order_2.marte"}
bOrder := builder.NewBuilder(filesOrder) bOrder := builder.NewBuilder(filesOrder)
outputFileOrder, err := os.Create("build_test/ORDER.marte") outputFileOrder, err := os.Create("build_test/ORDER.marte")
if err != nil { if err != nil {
t.Fatalf("Failed to create output file: %v", err) t.Fatalf("Failed to create output file: %v", err)
@@ -207,18 +207,18 @@ func TestBuildCommand(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("Build order test failed: %v", err) t.Fatalf("Build order test failed: %v", err)
} }
contentOrder, _ := ioutil.ReadFile("build_test/ORDER.marte") contentOrder, _ := ioutil.ReadFile("build_test/ORDER.marte")
outputOrder := string(contentOrder) outputOrder := string(contentOrder)
// Check for Class before Field // Check for Class before Field
classIdx := strings.Index(outputOrder, "Class = \"Ordered\"") classIdx := strings.Index(outputOrder, "Class = \"Ordered\"")
fieldIdx := strings.Index(outputOrder, "Field = 1") fieldIdx := strings.Index(outputOrder, "Field = 1")
if classIdx == -1 || fieldIdx == -1 { if classIdx == -1 || fieldIdx == -1 {
t.Fatal("Missing Class or Field in ordered output") t.Fatal("Missing Class or Field in ordered output")
} }
if classIdx > fieldIdx { if classIdx > fieldIdx {
t.Error("Expected Class to appear before Field in merged output") t.Error("Expected Class to appear before Field in merged output")
} }
} }

View File

@@ -30,28 +30,28 @@ func TestLSPHoverDoc(t *testing.T) {
file := "doc.marte" file := "doc.marte"
idx.AddFile(file, config) idx.AddFile(file, config)
idx.ResolveReferences() idx.ResolveReferences()
// Test 1: Hover over +MyObject definition // Test 1: Hover over +MyObject definition
res := idx.Query(file, 4, 2) // Line 4: +MyObject res := idx.Query(file, 4, 2) // Line 4: +MyObject
if res == nil || res.Node == nil { if res == nil || res.Node == nil {
t.Fatal("Query failed for definition") t.Fatal("Query failed for definition")
} }
expectedDoc := "Object Documentation\nSecond line" expectedDoc := "Object Documentation\nSecond line"
if res.Node.Doc != expectedDoc { if res.Node.Doc != expectedDoc {
t.Errorf("Expected definition doc:\n%q\nGot:\n%q", expectedDoc, res.Node.Doc) t.Errorf("Expected definition doc:\n%q\nGot:\n%q", expectedDoc, res.Node.Doc)
} }
// Test 2: Hover over MyObject reference // Test 2: Hover over MyObject reference
resRef := idx.Query(file, 10, 16) // Line 10: RefField = MyObject resRef := idx.Query(file, 10, 16) // Line 10: RefField = MyObject
if resRef == nil || resRef.Reference == nil { if resRef == nil || resRef.Reference == nil {
t.Fatal("Query failed for reference") t.Fatal("Query failed for reference")
} }
if resRef.Reference.Target == nil { if resRef.Reference.Target == nil {
t.Fatal("Reference target not resolved") t.Fatal("Reference target not resolved")
} }
if resRef.Reference.Target.Doc != expectedDoc { if resRef.Reference.Target.Doc != expectedDoc {
t.Errorf("Expected reference target definition doc:\n%q\nGot:\n%q", expectedDoc, resRef.Reference.Target.Doc) t.Errorf("Expected reference target definition doc:\n%q\nGot:\n%q", expectedDoc, resRef.Reference.Target.Doc)
} }

View File

@@ -49,17 +49,23 @@ func TestLSPSignalReferences(t *testing.T) {
if root == nil { if root == nil {
t.Fatal("Root node not found") t.Fatal("Root node not found")
} }
// Traverse to MySig // Traverse to MySig
dataNode := root.Children["Data"] dataNode := root.Children["Data"]
if dataNode == nil { t.Fatal("Data node not found") } if dataNode == nil {
t.Fatal("Data node not found")
}
myDS := dataNode.Children["MyDS"] myDS := dataNode.Children["MyDS"]
if myDS == nil { t.Fatal("MyDS node not found") } if myDS == nil {
t.Fatal("MyDS node not found")
}
signals := myDS.Children["Signals"] signals := myDS.Children["Signals"]
if signals == nil { t.Fatal("Signals node not found") } if signals == nil {
t.Fatal("Signals node not found")
}
mySigDef := signals.Children["MySig"] mySigDef := signals.Children["MySig"]
if mySigDef == nil { if mySigDef == nil {
t.Fatal("Definition of MySig not found in tree") t.Fatal("Definition of MySig not found in tree")
@@ -84,4 +90,4 @@ func TestLSPSignalReferences(t *testing.T) {
if foundRefs != 1 { if foundRefs != 1 {
t.Errorf("Expected 1 reference (Direct), found %d", foundRefs) t.Errorf("Expected 1 reference (Direct), found %d", foundRefs)
} }
} }

View File

@@ -26,14 +26,14 @@ func loadConfig(t *testing.T, filename string) *parser.Configuration {
func TestLSPDiagnostics(t *testing.T) { func TestLSPDiagnostics(t *testing.T) {
inputFile := "integration/check_dup.marte" inputFile := "integration/check_dup.marte"
config := loadConfig(t, inputFile) config := loadConfig(t, inputFile)
// Simulate LSP logic: Build Index -> Validate // Simulate LSP logic: Build Index -> Validate
idx := index.NewProjectTree() idx := index.NewProjectTree()
idx.AddFile(inputFile, config) idx.AddFile(inputFile, config)
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
// Check for expected diagnostics // Check for expected diagnostics
found := false found := false
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
@@ -51,7 +51,7 @@ func TestLSPDiagnostics(t *testing.T) {
} }
// For GoToDefinition and References, we need to test the Indexer's ability to resolve symbols. // For GoToDefinition and References, we need to test the Indexer's ability to resolve symbols.
// Currently, my Indexer (ProjectTree) stores structure but doesn't explicitly track // Currently, my Indexer (ProjectTree) stores structure but doesn't explicitly track
// "references" in a way that maps a source position to a target symbol yet. // "references" in a way that maps a source position to a target symbol yet.
// The ProjectTree is built for structure merging. // The ProjectTree is built for structure merging.
// To support LSP "Go To Definition", we need to map usage -> definition. // To support LSP "Go To Definition", we need to map usage -> definition.
@@ -63,7 +63,7 @@ func TestLSPDiagnostics(t *testing.T) {
// Previously (before rewrite), `index.go` had `References []Reference`. // Previously (before rewrite), `index.go` had `References []Reference`.
// I removed it during the rewrite to ProjectTree! // I removed it during the rewrite to ProjectTree!
// I need to re-implement reference tracking in `ProjectTree` or a parallel structure // I need to re-implement reference tracking in `ProjectTree` or a parallel structure
// to support LSP features. // to support LSP features.
func TestLSPDefinition(t *testing.T) { func TestLSPDefinition(t *testing.T) {
// Create a virtual file content with a definition and a reference // Create a virtual file content with a definition and a reference
@@ -94,15 +94,15 @@ func TestLSPDefinition(t *testing.T) {
break break
} }
} }
if foundRef == nil { if foundRef == nil {
t.Fatal("Reference to MyObject not found in index") t.Fatal("Reference to MyObject not found in index")
} }
if foundRef.Target == nil { if foundRef.Target == nil {
t.Fatal("Reference to MyObject was not resolved to a target") t.Fatal("Reference to MyObject was not resolved to a target")
} }
if foundRef.Target.RealName != "+MyObject" { if foundRef.Target.RealName != "+MyObject" {
t.Errorf("Expected target to be +MyObject, got %s", foundRef.Target.RealName) t.Errorf("Expected target to be +MyObject, got %s", foundRef.Target.RealName)
} }
@@ -123,19 +123,19 @@ func TestLSPHover(t *testing.T) {
idx := index.NewProjectTree() idx := index.NewProjectTree()
file := "hover.marte" file := "hover.marte"
idx.AddFile(file, config) idx.AddFile(file, config)
// +MyObject is at line 2. // +MyObject is at line 2.
// Query at line 2, col 2 (on 'M' of MyObject) // Query at line 2, col 2 (on 'M' of MyObject)
res := idx.Query(file, 2, 2) res := idx.Query(file, 2, 2)
if res == nil { if res == nil {
t.Fatal("Query returned nil") t.Fatal("Query returned nil")
} }
if res.Node == nil { if res.Node == nil {
t.Fatal("Expected Node result") t.Fatal("Expected Node result")
} }
if res.Node.RealName != "+MyObject" { if res.Node.RealName != "+MyObject" {
t.Errorf("Expected +MyObject, got %s", res.Node.RealName) t.Errorf("Expected +MyObject, got %s", res.Node.RealName)
} }

View File

@@ -49,13 +49,13 @@ func TestFunctionsArrayValidation(t *testing.T) {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
if strings.Contains(d.Message, "not found or is not a valid GAM") { if strings.Contains(d.Message, "not found or is not a valid GAM") {
// This covers both InvalidGAM and MissingGAM cases // This covers both InvalidGAM and MissingGAM cases
if strings.Contains(d.Message, "InvalidGAM") { if strings.Contains(d.Message, "InvalidGAM") {
foundInvalid = true foundInvalid = true
} }
if strings.Contains(d.Message, "MissingGAM") { if strings.Contains(d.Message, "MissingGAM") {
foundMissing = true foundMissing = true
} }
} }
if strings.Contains(d.Message, "must contain references") { if strings.Contains(d.Message, "must contain references") {
foundNotRef = true foundNotRef = true

View File

@@ -91,10 +91,10 @@ func TestGAMSignalValidation(t *testing.T) {
} }
if !foundBadInput || !foundMissing || !foundBadOutput { if !foundBadInput || !foundMissing || !foundBadOutput {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
t.Logf("Diagnostic: %s", d.Message) t.Logf("Diagnostic: %s", d.Message)
} }
} }
if !foundBadInput { if !foundBadInput {
t.Error("Expected error for OutDS in InputSignals") t.Error("Expected error for OutDS in InputSignals")

View File

@@ -21,23 +21,23 @@ func TestGlobalPragmaDebug(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("Parse failed: %v", err) t.Fatalf("Parse failed: %v", err)
} }
// Check if pragma parsed // Check if pragma parsed
if len(config.Pragmas) == 0 { if len(config.Pragmas) == 0 {
t.Fatal("Pragma not parsed") t.Fatal("Pragma not parsed")
} }
t.Logf("Parsed Pragma 0: %s", config.Pragmas[0].Text) t.Logf("Parsed Pragma 0: %s", config.Pragmas[0].Text)
idx := index.NewProjectTree() idx := index.NewProjectTree()
idx.AddFile("debug.marte", config) idx.AddFile("debug.marte", config)
idx.ResolveReferences() idx.ResolveReferences()
// Check if added to GlobalPragmas // Check if added to GlobalPragmas
pragmas, ok := idx.GlobalPragmas["debug.marte"] pragmas, ok := idx.GlobalPragmas["debug.marte"]
if !ok || len(pragmas) == 0 { if !ok || len(pragmas) == 0 {
t.Fatal("GlobalPragmas not populated") t.Fatal("GlobalPragmas not populated")
} }
t.Logf("Global Pragma stored: %s", pragmas[0]) t.Logf("Global Pragma stored: %s", pragmas[0])
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
@@ -48,11 +48,11 @@ func TestGlobalPragmaDebug(t *testing.T) {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
if strings.Contains(d.Message, "Implicitly Defined Signal") { if strings.Contains(d.Message, "Implicitly Defined Signal") {
foundImplicitWarning = true foundImplicitWarning = true
t.Logf("Found warning: %s", d.Message) t.Logf("Found warning: %s", d.Message)
} }
if strings.Contains(d.Message, "Unused GAM") { if strings.Contains(d.Message, "Unused GAM") {
foundUnusedWarning = true foundUnusedWarning = true
t.Logf("Found warning: %s", d.Message) t.Logf("Found warning: %s", d.Message)
} }
} }

View File

@@ -64,10 +64,10 @@ func TestImplicitSignal(t *testing.T) {
} }
if !foundWarning || foundError { if !foundWarning || foundError {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
t.Logf("Diagnostic: %s", d.Message) t.Logf("Diagnostic: %s", d.Message)
} }
} }
if !foundWarning { if !foundWarning {
t.Error("Expected warning for ImplicitSig") t.Error("Expected warning for ImplicitSig")
@@ -83,9 +83,9 @@ func TestImplicitSignal(t *testing.T) {
` `
p2 := parser.NewParser(contentMissingType) p2 := parser.NewParser(contentMissingType)
config2, err2 := p2.Parse() config2, err2 := p2.Parse()
if err2 != nil { if err2 != nil {
t.Fatalf("Parse2 failed: %v", err2) t.Fatalf("Parse2 failed: %v", err2)
} }
idx2 := index.NewProjectTree() idx2 := index.NewProjectTree()
idx2.AddFile("missing_type.marte", config2) idx2.AddFile("missing_type.marte", config2)
idx2.ResolveReferences() idx2.ResolveReferences()
@@ -99,9 +99,9 @@ func TestImplicitSignal(t *testing.T) {
} }
} }
if !foundTypeErr { if !foundTypeErr {
for _, d := range v2.Diagnostics { for _, d := range v2.Diagnostics {
t.Logf("Diagnostic2: %s", d.Message) t.Logf("Diagnostic2: %s", d.Message)
} }
t.Error("Expected error for missing Type in implicit signal") t.Error("Expected error for missing Type in implicit signal")
} }
} }

View File

@@ -32,18 +32,18 @@ func TestMultiFileNodeValidation(t *testing.T) {
// Resolving references might be needed if the validator relies on it for merging implicitly // Resolving references might be needed if the validator relies on it for merging implicitly
// But primarily we want to check if the validator sees the merged node. // But primarily we want to check if the validator sees the merged node.
// The current implementation of Validator likely iterates over the ProjectTree. // The current implementation of Validator likely iterates over the ProjectTree.
// If the ProjectTree doesn't merge nodes automatically, the Validator needs to do it. // If the ProjectTree doesn't merge nodes automatically, the Validator needs to do it.
// However, the spec says "The build tool, validator, and LSP must merge these definitions". // However, the spec says "The build tool, validator, and LSP must merge these definitions".
// Let's assume the Validator or Index does the merging logic. // Let's assume the Validator or Index does the merging logic.
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
// +MyNode is split. // +MyNode is split.
// valid_1 has FieldA // valid_1 has FieldA
// valid_2 has Class and FieldB // valid_2 has Class and FieldB
// If merging works, it should have a Class, so no error about missing Class. // If merging works, it should have a Class, so no error about missing Class.
for _, diag := range v.Diagnostics { for _, diag := range v.Diagnostics {
if strings.Contains(diag.Message, "must contain a 'Class' field") { if strings.Contains(diag.Message, "must contain a 'Class' field") {
@@ -79,14 +79,14 @@ func TestMultiFileReference(t *testing.T) {
parseAndAddToIndex(t, idx, "integration/multifile_ref_2.marte") parseAndAddToIndex(t, idx, "integration/multifile_ref_2.marte")
idx.ResolveReferences() idx.ResolveReferences()
// Check if the reference in +SourceNode to TargetNode is resolved. // Check if the reference in +SourceNode to TargetNode is resolved.
v := validator.NewValidator(idx, ".") v := validator.NewValidator(idx, ".")
v.ValidateProject() v.ValidateProject()
if len(v.Diagnostics) > 0 { if len(v.Diagnostics) > 0 {
// Filter out irrelevant errors // Filter out irrelevant errors
} }
} }
func TestHierarchicalPackageMerge(t *testing.T) { func TestHierarchicalPackageMerge(t *testing.T) {
@@ -99,13 +99,13 @@ func TestHierarchicalPackageMerge(t *testing.T) {
// +MyObj should have Class (from file 1) and FieldX (from file 2). // +MyObj should have Class (from file 1) and FieldX (from file 2).
// If Class is missing, ValidateProject reports error. // If Class is missing, ValidateProject reports error.
for _, diag := range v.Diagnostics { for _, diag := range v.Diagnostics {
if strings.Contains(diag.Message, "must contain a 'Class' field") { if strings.Contains(diag.Message, "must contain a 'Class' field") {
t.Errorf("Unexpected 'Class' field error for +MyObj: %s", diag.Message) t.Errorf("Unexpected 'Class' field error for +MyObj: %s", diag.Message)
} }
} }
// We can also inspect the tree to verify FieldX is there (optional, but good for confidence) // We can also inspect the tree to verify FieldX is there (optional, but good for confidence)
baseNode := idx.Root.Children["Base"] baseNode := idx.Root.Children["Base"]
if baseNode == nil { if baseNode == nil {
@@ -115,7 +115,7 @@ func TestHierarchicalPackageMerge(t *testing.T) {
if objNode == nil { if objNode == nil {
t.Fatal("MyObj node not found in Base") t.Fatal("MyObj node not found in Base")
} }
hasFieldX := false hasFieldX := false
for _, frag := range objNode.Fragments { for _, frag := range objNode.Fragments {
for _, def := range frag.Definitions { for _, def := range frag.Definitions {
@@ -124,7 +124,7 @@ func TestHierarchicalPackageMerge(t *testing.T) {
} }
} }
} }
if !hasFieldX { if !hasFieldX {
t.Error("FieldX not found in +MyObj") t.Error("FieldX not found in +MyObj")
} }
@@ -153,44 +153,44 @@ func TestHierarchicalDuplicate(t *testing.T) {
func TestIsolatedFileValidation(t *testing.T) { func TestIsolatedFileValidation(t *testing.T) {
idx := index.NewProjectTree() idx := index.NewProjectTree()
// File 1: Has package. Defines SharedClass. // File 1: Has package. Defines SharedClass.
f1Content := ` f1Content := `
#package Proj.Pkg #package Proj.Pkg
+SharedObj = { Class = SharedClass } +SharedObj = { Class = SharedClass }
` `
p1 := parser.NewParser(f1Content) p1 := parser.NewParser(f1Content)
c1, _ := p1.Parse() c1, _ := p1.Parse()
idx.AddFile("shared.marte", c1) idx.AddFile("shared.marte", c1)
// File 2: No package. References SharedObj. // File 2: No package. References SharedObj.
// Should NOT resolve to SharedObj in shared.marte because iso.marte is isolated. // Should NOT resolve to SharedObj in shared.marte because iso.marte is isolated.
f2Content := ` f2Content := `
+IsoObj = { +IsoObj = {
Class = "MyClass" Class = "MyClass"
Ref = SharedObj Ref = SharedObj
} }
` `
p2 := parser.NewParser(f2Content) p2 := parser.NewParser(f2Content)
c2, _ := p2.Parse() c2, _ := p2.Parse()
idx.AddFile("iso.marte", c2) idx.AddFile("iso.marte", c2)
idx.ResolveReferences() idx.ResolveReferences()
// Find reference // Find reference
var ref *index.Reference var ref *index.Reference
for i := range idx.References { for i := range idx.References {
if idx.References[i].File == "iso.marte" && idx.References[i].Name == "SharedObj" { if idx.References[i].File == "iso.marte" && idx.References[i].Name == "SharedObj" {
ref = &idx.References[i] ref = &idx.References[i]
break break
} }
} }
if ref == nil { if ref == nil {
t.Fatal("Reference SharedObj not found in index") t.Fatal("Reference SharedObj not found in index")
} }
if ref.Target != nil { if ref.Target != nil {
t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File) t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File)
} }
} }