Improve LSP and builder; route all logging through a centralized logger

This commit is contained in:
Martino Ferrari
2026-01-21 14:35:30 +01:00
parent d4d857bf05
commit f3c13fca55
21 changed files with 891 additions and 170 deletions

View File

@@ -2,12 +2,12 @@ package main
import ( import (
"bytes" "bytes"
"fmt"
"os" "os"
"github.com/marte-dev/marte-dev-tools/internal/builder" "github.com/marte-dev/marte-dev-tools/internal/builder"
"github.com/marte-dev/marte-dev-tools/internal/formatter" "github.com/marte-dev/marte-dev-tools/internal/formatter"
"github.com/marte-dev/marte-dev-tools/internal/index" "github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/logger"
"github.com/marte-dev/marte-dev-tools/internal/lsp" "github.com/marte-dev/marte-dev-tools/internal/lsp"
"github.com/marte-dev/marte-dev-tools/internal/parser" "github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator" "github.com/marte-dev/marte-dev-tools/internal/validator"
@@ -15,8 +15,8 @@ import (
func main() { func main() {
if len(os.Args) < 2 { if len(os.Args) < 2 {
fmt.Println("Usage: mdt <command> [arguments]") logger.Println("Usage: mdt <command> [arguments]")
fmt.Println("Commands: lsp, build, check, fmt") logger.Println("Commands: lsp, build, check, fmt")
os.Exit(1) os.Exit(1)
} }
@@ -31,7 +31,7 @@ func main() {
case "fmt": case "fmt":
runFmt(os.Args[2:]) runFmt(os.Args[2:])
default: default:
fmt.Printf("Unknown command: %s\n", command) logger.Printf("Unknown command: %s\n", command)
os.Exit(1) os.Exit(1)
} }
} }
@@ -42,28 +42,21 @@ func runLSP() {
func runBuild(args []string) { func runBuild(args []string) {
if len(args) < 1 { if len(args) < 1 {
fmt.Println("Usage: mdt build <input_files...>") logger.Println("Usage: mdt build <input_files...>")
os.Exit(1) os.Exit(1)
} }
outputDir := "build"
if err := os.MkdirAll(outputDir, 0755); err != nil {
fmt.Printf("Build failed: %v\n", err)
os.Exit(1)
} else {
b := builder.NewBuilder(args) b := builder.NewBuilder(args)
err = b.Build(outputDir) err := b.Build(os.Stdout)
if err != nil { if err != nil {
fmt.Printf("Build failed: %v\n", err) logger.Printf("Build failed: %v\n", err)
os.Exit(1) os.Exit(1)
} }
fmt.Println("Build successful. Output in", outputDir)
}
} }
func runCheck(args []string) { func runCheck(args []string) {
if len(args) < 1 { if len(args) < 1 {
fmt.Println("Usage: mdt check <input_files...>") logger.Println("Usage: mdt check <input_files...>")
os.Exit(1) os.Exit(1)
} }
@@ -73,14 +66,14 @@ func runCheck(args []string) {
for _, file := range args { for _, file := range args {
content, err := os.ReadFile(file) content, err := os.ReadFile(file)
if err != nil { if err != nil {
fmt.Printf("Error reading %s: %v\n", file, err) logger.Printf("Error reading %s: %v\n", file, err)
continue continue
} }
p := parser.NewParser(string(content)) p := parser.NewParser(string(content))
config, err := p.Parse() config, err := p.Parse()
if err != nil { if err != nil {
fmt.Printf("%s: Grammar error: %v\n", file, err) logger.Printf("%s: Grammar error: %v\n", file, err)
continue continue
} }
@@ -100,33 +93,33 @@ func runCheck(args []string) {
if diag.Level == validator.LevelWarning { if diag.Level == validator.LevelWarning {
level = "WARNING" level = "WARNING"
} }
fmt.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message) logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message)
} }
if len(v.Diagnostics) > 0 { if len(v.Diagnostics) > 0 {
fmt.Printf("\nFound %d issues.\n", len(v.Diagnostics)) logger.Printf("\nFound %d issues.\n", len(v.Diagnostics))
} else { } else {
fmt.Println("No issues found.") logger.Println("No issues found.")
} }
} }
func runFmt(args []string) { func runFmt(args []string) {
if len(args) < 1 { if len(args) < 1 {
fmt.Println("Usage: mdt fmt <input_files...>") logger.Println("Usage: mdt fmt <input_files...>")
os.Exit(1) os.Exit(1)
} }
for _, file := range args { for _, file := range args {
content, err := os.ReadFile(file) content, err := os.ReadFile(file)
if err != nil { if err != nil {
fmt.Printf("Error reading %s: %v\n", file, err) logger.Printf("Error reading %s: %v\n", file, err)
continue continue
} }
p := parser.NewParser(string(content)) p := parser.NewParser(string(content))
config, err := p.Parse() config, err := p.Parse()
if err != nil { if err != nil {
fmt.Printf("Error parsing %s: %v\n", file, err) logger.Printf("Error parsing %s: %v\n", file, err)
continue continue
} }
@@ -135,9 +128,9 @@ func runFmt(args []string) {
err = os.WriteFile(file, buf.Bytes(), 0644) err = os.WriteFile(file, buf.Bytes(), 0644)
if err != nil { if err != nil {
fmt.Printf("Error writing %s: %v\n", file, err) logger.Printf("Error writing %s: %v\n", file, err)
continue continue
} }
fmt.Printf("Formatted %s\n", file) logger.Printf("Formatted %s\n", file)
} }
} }

View File

@@ -2,9 +2,7 @@ package builder
import ( import (
"fmt" "fmt"
"io/ioutil"
"os" "os"
"path/filepath"
"sort" "sort"
"strings" "strings"
@@ -20,12 +18,15 @@ func NewBuilder(files []string) *Builder {
return &Builder{Files: files} return &Builder{Files: files}
} }
func (b *Builder) Build(outputDir string) error { func (b *Builder) Build(f *os.File) error {
// Build the Project Tree // Build the Project Tree
tree := index.NewProjectTree() tree := index.NewProjectTree()
var expectedProject string
var projectSet bool
for _, file := range b.Files { for _, file := range b.Files {
content, err := ioutil.ReadFile(file) content, err := os.ReadFile(file)
if err != nil { if err != nil {
return err return err
} }
@@ -36,81 +37,27 @@ func (b *Builder) Build(outputDir string) error {
return fmt.Errorf("error parsing %s: %v", file, err) return fmt.Errorf("error parsing %s: %v", file, err)
} }
// Check Namespace/Project Consistency
proj := ""
if config.Package != nil {
parts := strings.Split(config.Package.URI, ".")
if len(parts) > 0 {
proj = strings.TrimSpace(parts[0])
}
}
if !projectSet {
expectedProject = proj
projectSet = true
} else if proj != expectedProject {
return fmt.Errorf("multiple namespaces defined in sources: found '%s' and '%s'", expectedProject, proj)
}
tree.AddFile(file, config) tree.AddFile(file, config)
} }
// Iterate over top-level children of the root (Packages) // Write entire root content (definitions and children) to the single output file
// Spec says: "merges all files sharing the same base namespace" b.writeNodeContent(f, tree.Root, 0)
// So if we have #package A.B and #package A.C, they define A.
// We should output A.marte? Or A/B.marte?
// Usually MARTe projects output one file per "Root Object" or as specified.
// The prompt says: "Output format is the same as input ... without #package".
// "Build tool merges all files sharing the same base namespace into a single output."
// If files have:
// File1: #package App
// File2: #package App
// Output: App.marte
// If File3: #package Other
// Output: Other.marte
// So we iterate Root.Children.
for name, node := range tree.Root.Children {
outputPath := filepath.Join(outputDir, name+".marte")
f, err := os.Create(outputPath)
if err != nil {
return err
}
defer f.Close()
// Write node content
// Top level node in tree corresponds to the "Base Namespace" name?
// e.g. #package App.Sub -> Root->App->Sub.
// If we output App.marte, we should generate "+App = { ... }"
// But wait. Input: #package App.
// +Node = ...
// Output: +Node = ...
// If Input: #package App.
// +App = ... (Recursive?)
// MARTe config is usually a list of definitions.
// If #package App, and we generate App.marte.
// Does App.marte contain "App = { ... }"?
// Or does it contain the CONTENT of App?
// "Output format is the same as input configuration but without the #package macro"
// Input: #package App \n +Node = {}
// Output: +Node = {}
// So we are printing the CHILDREN of the "Base Namespace".
// But wait, "Base Namespace" could be complex "A.B".
// "Merges files with the same base namespace".
// Assuming base namespace is the first segment? or the whole match?
// Let's assume we output one file per top-level child of Root.
// And we print that Child as an Object.
// Actually, if I have:
// #package App
// +Node = {}
// Tree: Root -> App -> Node.
// If I generate App.marte.
// Should it look like:
// +Node = {}
// Or
// +App = { +Node = {} }?
// If "without #package macro", it implies we are expanding the package into structure?
// Or just removing the line?
// If I remove #package App, and keep +Node={}, then +Node is at root.
// But originally it was at App.Node.
// So preserving semantics means wrapping it in +App = { ... }.
b.writeNodeContent(f, node, 0)
}
return nil return nil
} }

View File

@@ -1,17 +1,18 @@
package index package index
import ( import (
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/marte-dev/marte-dev-tools/internal/logger"
"github.com/marte-dev/marte-dev-tools/internal/parser" "github.com/marte-dev/marte-dev-tools/internal/parser"
) )
type ProjectTree struct { type ProjectTree struct {
Root *ProjectNode Root *ProjectNode
References []Reference References []Reference
IsolatedFiles map[string]*ProjectNode
} }
func (pt *ProjectTree) ScanDirectory(rootPath string) error { func (pt *ProjectTree) ScanDirectory(rootPath string) error {
@@ -65,6 +66,7 @@ func NewProjectTree() *ProjectTree {
Children: make(map[string]*ProjectNode), Children: make(map[string]*ProjectNode),
Metadata: make(map[string]string), Metadata: make(map[string]string),
}, },
IsolatedFiles: make(map[string]*ProjectNode),
} }
} }
@@ -84,6 +86,7 @@ func (pt *ProjectTree) RemoveFile(file string) {
} }
pt.References = newRefs pt.References = newRefs
delete(pt.IsolatedFiles, file)
pt.removeFileFromNode(pt.Root, file) pt.removeFileFromNode(pt.Root, file)
} }
@@ -151,11 +154,26 @@ func (pt *ProjectTree) extractFieldMetadata(node *ProjectNode, f *parser.Field)
func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) { func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) {
pt.RemoveFile(file) pt.RemoveFile(file)
if config.Package == nil {
node := &ProjectNode{
Children: make(map[string]*ProjectNode),
Metadata: make(map[string]string),
}
pt.IsolatedFiles[file] = node
pt.populateNode(node, file, config)
return
}
node := pt.Root node := pt.Root
if config.Package != nil {
parts := strings.Split(config.Package.URI, ".") parts := strings.Split(config.Package.URI, ".")
for _, part := range parts { // Skip first part as per spec (Project Name is namespace only)
part = strings.TrimSpace(part) startIdx := 0
if len(parts) > 0 {
startIdx = 1
}
for i := startIdx; i < len(parts); i++ {
part := strings.TrimSpace(parts[i])
if part == "" { if part == "" {
continue continue
} }
@@ -170,8 +188,11 @@ func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) {
} }
node = node.Children[part] node = node.Children[part]
} }
pt.populateNode(node, file, config)
} }
func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *parser.Configuration) {
fileFragment := &Fragment{ fileFragment := &Fragment{
File: file, File: file,
IsObject: false, IsObject: false,
@@ -184,7 +205,6 @@ func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) {
case *parser.Field: case *parser.Field:
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
// Metadata update not really relevant for package node usually, but consistency
case *parser.ObjectNode: case *parser.ObjectNode:
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok { if _, ok := node.Children[norm]; !ok {
@@ -319,9 +339,13 @@ func (pt *ProjectTree) indexValue(file string, val parser.Value) {
func (pt *ProjectTree) ResolveReferences() { func (pt *ProjectTree) ResolveReferences() {
for i := range pt.References { for i := range pt.References {
ref := &pt.References[i] ref := &pt.References[i]
if isoNode, ok := pt.IsolatedFiles[ref.File]; ok {
ref.Target = pt.findNode(isoNode, ref.Name)
} else {
ref.Target = pt.findNode(pt.Root, ref.Name) ref.Target = pt.findNode(pt.Root, ref.Name)
} }
} }
}
func (pt *ProjectTree) findNode(root *ProjectNode, name string) *ProjectNode { func (pt *ProjectTree) findNode(root *ProjectNode, name string) *ProjectNode {
if root.RealName == name || root.Name == name { if root.RealName == name || root.Name == name {
@@ -342,9 +366,9 @@ type QueryResult struct {
} }
func (pt *ProjectTree) Query(file string, line, col int) *QueryResult { func (pt *ProjectTree) Query(file string, line, col int) *QueryResult {
fmt.Fprintf(os.Stderr, "File: %s:%d:%d\n", file, line, col) logger.Printf("File: %s:%d:%d", file, line, col)
for i := range pt.References { for i := range pt.References {
fmt.Fprintf(os.Stderr, "%s\n", pt.Root.Name) logger.Printf("%s", pt.Root.Name)
ref := &pt.References[i] ref := &pt.References[i]
if ref.File == file { if ref.File == file {
if line == ref.Position.Line && col >= ref.Position.Column && col < ref.Position.Column+len(ref.Name) { if line == ref.Position.Line && col >= ref.Position.Column && col < ref.Position.Column+len(ref.Name) {
@@ -353,6 +377,10 @@ func (pt *ProjectTree) Query(file string, line, col int) *QueryResult {
} }
} }
if isoNode, ok := pt.IsolatedFiles[file]; ok {
return pt.queryNode(isoNode, file, line, col)
}
return pt.queryNode(pt.Root, file, line, col) return pt.queryNode(pt.Root, file, line, col)
} }

27
internal/logger/logger.go Normal file
View File

@@ -0,0 +1,27 @@
// Package logger provides the centralized logging facility for mdt.
// All output goes to stderr so that stdout stays free for CLI results
// (build artifacts, formatted text) and the LSP stdio protocol.
package logger
import (
"log"
"os"
)
var (
// Default logger writes to stderr, prefixed with "[mdt] " and timestamps.
std = log.New(os.Stderr, "[mdt] ", log.LstdFlags)
)
// Printf formats according to format and writes the message to the default logger.
func Printf(format string, v ...interface{}) {
std.Printf(format, v...)
}
// Println writes the arguments to the default logger, space-separated, newline-terminated.
func Println(v ...interface{}) {
std.Println(v...)
}
// Fatal logs the arguments and then terminates the process (log.Fatal calls os.Exit(1)).
func Fatal(v ...interface{}) {
std.Fatal(v...)
}
// Fatalf logs the formatted message and then terminates the process (os.Exit(1)).
func Fatalf(format string, v ...interface{}) {
std.Fatalf(format, v...)
}

View File

@@ -2,13 +2,16 @@ package lsp
import ( import (
"bufio" "bufio"
"bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"os" "os"
"strings" "strings"
"github.com/marte-dev/marte-dev-tools/internal/formatter"
"github.com/marte-dev/marte-dev-tools/internal/index" "github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/logger"
"github.com/marte-dev/marte-dev-tools/internal/parser" "github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator" "github.com/marte-dev/marte-dev-tools/internal/validator"
) )
@@ -117,7 +120,23 @@ type LSPDiagnostic struct {
Source string `json:"source"` Source string `json:"source"`
} }
// DocumentFormattingParams carries the parameters of the LSP
// "textDocument/formatting" request: the target document plus the
// client's formatting preferences.
type DocumentFormattingParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"`
Options FormattingOptions `json:"options"`
}
// FormattingOptions mirrors the client-side editor settings.
// NOTE(review): these options are currently not consumed by
// handleFormatting, which always applies the formatter's own rules —
// confirm whether tabSize/insertSpaces should be honored.
type FormattingOptions struct {
TabSize int `json:"tabSize"`
InsertSpaces bool `json:"insertSpaces"`
}
// TextEdit is a single replacement returned to the client: the text in
// Range is substituted by NewText.
type TextEdit struct {
Range Range `json:"range"`
NewText string `json:"newText"`
}
var tree = index.NewProjectTree() var tree = index.NewProjectTree()
var documents = make(map[string]string)
func RunServer() { func RunServer() {
reader := bufio.NewReader(os.Stdin) reader := bufio.NewReader(os.Stdin)
@@ -127,7 +146,7 @@ func RunServer() {
if err == io.EOF { if err == io.EOF {
break break
} }
fmt.Fprintf(os.Stderr, "Error reading message: %v\n", err) logger.Printf("Error reading message: %v\n", err)
continue continue
} }
@@ -174,7 +193,7 @@ func handleMessage(msg *JsonRpcMessage) {
} }
if root != "" { if root != "" {
fmt.Fprintf(os.Stderr, "Scanning workspace: %s\n", root) logger.Printf("Scanning workspace: %s\n", root)
tree.ScanDirectory(root) tree.ScanDirectory(root)
tree.ResolveReferences() tree.ResolveReferences()
} }
@@ -186,6 +205,7 @@ func handleMessage(msg *JsonRpcMessage) {
"hoverProvider": true, "hoverProvider": true,
"definitionProvider": true, "definitionProvider": true,
"referencesProvider": true, "referencesProvider": true,
"documentFormattingProvider": true,
}, },
}) })
case "initialized": case "initialized":
@@ -207,16 +227,16 @@ func handleMessage(msg *JsonRpcMessage) {
case "textDocument/hover": case "textDocument/hover":
var params HoverParams var params HoverParams
if err := json.Unmarshal(msg.Params, &params); err == nil { if err := json.Unmarshal(msg.Params, &params); err == nil {
fmt.Fprintf(os.Stderr, "Hover: %s:%d\n", params.TextDocument.URI, params.Position.Line) logger.Printf("Hover: %s:%d", params.TextDocument.URI, params.Position.Line)
res := handleHover(params) res := handleHover(params)
if res != nil { if res != nil {
fmt.Fprintf(os.Stderr, "Res: %v\n", res.Contents) logger.Printf("Res: %v", res.Contents)
} else { } else {
fmt.Fprint(os.Stderr, "Res: NIL\n") logger.Printf("Res: NIL")
} }
respond(msg.ID, res) respond(msg.ID, res)
} else { } else {
fmt.Fprint(os.Stderr, "not recovered hover parameters\n") logger.Printf("not recovered hover parameters")
respond(msg.ID, nil) respond(msg.ID, nil)
} }
case "textDocument/definition": case "textDocument/definition":
@@ -229,6 +249,11 @@ func handleMessage(msg *JsonRpcMessage) {
if err := json.Unmarshal(msg.Params, &params); err == nil { if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleReferences(params)) respond(msg.ID, handleReferences(params))
} }
case "textDocument/formatting":
var params DocumentFormattingParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleFormatting(params))
}
} }
} }
@@ -238,6 +263,7 @@ func uriToPath(uri string) string {
func handleDidOpen(params DidOpenTextDocumentParams) { func handleDidOpen(params DidOpenTextDocumentParams) {
path := uriToPath(params.TextDocument.URI) path := uriToPath(params.TextDocument.URI)
documents[params.TextDocument.URI] = params.TextDocument.Text
p := parser.NewParser(params.TextDocument.Text) p := parser.NewParser(params.TextDocument.Text)
config, err := p.Parse() config, err := p.Parse()
if err == nil { if err == nil {
@@ -252,6 +278,7 @@ func handleDidChange(params DidChangeTextDocumentParams) {
return return
} }
text := params.ContentChanges[0].Text text := params.ContentChanges[0].Text
documents[params.TextDocument.URI] = text
path := uriToPath(params.TextDocument.URI) path := uriToPath(params.TextDocument.URI)
p := parser.NewParser(text) p := parser.NewParser(text)
config, err := p.Parse() config, err := p.Parse()
@@ -262,6 +289,39 @@ func handleDidChange(params DidChangeTextDocumentParams) {
} }
} }
// handleFormatting implements "textDocument/formatting". It re-parses the
// in-memory copy of the document and returns a single whole-document
// TextEdit containing the formatter's output. Returns nil (no edits) when
// the document is not tracked or does not parse.
func handleFormatting(params DocumentFormattingParams) []TextEdit {
uri := params.TextDocument.URI
// Format from the editor's live buffer (populated by didOpen/didChange),
// not from disk, so unsaved changes are formatted too.
text, ok := documents[uri]
if !ok {
return nil
}
p := parser.NewParser(text)
config, err := p.Parse()
if err != nil {
// Don't attempt to format a syntactically broken document.
return nil
}
var buf bytes.Buffer
// NOTE(review): formatter.Format's result/error (if any) is not checked
// here — confirm its signature and whether a failure can leave buf empty.
formatter.Format(config, &buf)
newText := buf.String()
// Count lines so the edit range covers the entire document.
lines := strings.Count(text, "\n")
if len(text) > 0 && !strings.HasSuffix(text, "\n") {
lines++
}
return []TextEdit{
{
Range: Range{
Start: Position{0, 0},
// End is set one line past the computed last line; LSP clients
// clamp out-of-range positions to the document end, so this
// reliably selects the whole document for replacement.
End: Position{lines + 1, 0},
},
NewText: newText,
},
}
}
func runValidation(uri string) { func runValidation(uri string) {
v := validator.NewValidator(tree) v := validator.NewValidator(tree)
v.ValidateProject() v.ValidateProject()
@@ -337,7 +397,7 @@ func handleHover(params HoverParams) *Hover {
res := tree.Query(path, line, col) res := tree.Query(path, line, col)
if res == nil { if res == nil {
fmt.Fprint(os.Stderr, "No object/node/reference found\n") logger.Printf("No object/node/reference found")
return nil return nil
} }

210
internal/lsp/server_test.go Normal file
View File

@@ -0,0 +1,210 @@
package lsp
import (
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
)
// TestInitProjectScan verifies that the "initialize" request scans the
// workspace directory, indexes both files, and that go-to-definition then
// resolves a cross-file reference.
func TestInitProjectScan(t *testing.T) {
// 1. Set up a temp workspace with two files in the same package.
tmpDir, err := os.MkdirTemp("", "lsp_test")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(tmpDir)
// File 1: defines +Target.
if err := os.WriteFile(filepath.Join(tmpDir, "def.marte"), []byte("#package Test.Common\n+Target = { Class = C }"), 0644); err != nil {
t.Fatal(err)
}
// File 2: references Target via "Link = Target".
// The "#package Test.Common\n" line puts the reference on line 1
// (0-based); "Target" starts at character 29 within that line.
if err := os.WriteFile(filepath.Join(tmpDir, "ref.marte"), []byte("#package Test.Common\n+Source = { Class = C Link = Target }"), 0644); err != nil {
t.Fatal(err)
}
// 2. Drive the server through the normal initialize message path.
tree = index.NewProjectTree() // Reset global tree
initParams := InitializeParams{RootPath: tmpDir}
paramsBytes, _ := json.Marshal(initParams)
msg := &JsonRpcMessage{
Method: "initialize",
Params: paramsBytes,
ID: 1,
}
handleMessage(msg)
// 3. Go-to-definition on the "Target" reference in ref.marte
// (line 1, character 29 — see the offset note above).
defParams := DefinitionParams{
TextDocument: TextDocumentIdentifier{URI: "file://" + filepath.Join(tmpDir, "ref.marte")},
Position: Position{Line: 1, Character: 29},
}
res := handleDefinition(defParams)
if res == nil {
t.Fatal("Definition not found via LSP after initialization")
}
locs, ok := res.([]Location)
if !ok {
t.Fatalf("Expected []Location, got %T", res)
}
if len(locs) == 0 {
t.Fatal("No locations found")
}
// The definition must resolve into the other file (def.marte).
expectedURI := "file://" + filepath.Join(tmpDir, "def.marte")
if locs[0].URI != expectedURI {
t.Errorf("Expected URI %s, got %s", expectedURI, locs[0].URI)
}
}
// TestHandleDefinition checks single-file go-to-definition: a reference
// field value ("RefField = MyObject") must resolve to the line where
// +MyObject is declared.
func TestHandleDefinition(t *testing.T) {
// Reset the package-global tree so earlier tests cannot leak state.
tree = index.NewProjectTree()
content := `
+MyObject = {
Class = Type
}
+RefObject = {
Class = Type
RefField = MyObject
}
`
path := "/test.marte"
p := parser.NewParser(content)
config, err := p.Parse()
if err != nil {
t.Fatalf("Parse failed: %v", err)
}
tree.AddFile(path, config)
tree.ResolveReferences()
// Log the indexed references to ease debugging on failure.
t.Logf("Refs: %d", len(tree.References))
for _, r := range tree.References {
t.Logf(" %s at %d:%d", r.Name, r.Position.Line, r.Position.Column)
}
// Request definition at the "MyObject" token inside RefField = MyObject.
params := DefinitionParams{
TextDocument: TextDocumentIdentifier{URI: "file://" + path},
Position: Position{Line: 6, Character: 15}, // "MyObject" in RefField = MyObject
}
result := handleDefinition(params)
if result == nil {
t.Fatal("handleDefinition returned nil")
}
locations, ok := result.([]Location)
if !ok {
t.Fatalf("Expected []Location, got %T", result)
}
if len(locations) != 1 {
t.Fatalf("Expected 1 location, got %d", len(locations))
}
if locations[0].Range.Start.Line != 1 { // +MyObject is on line 2 (0-indexed 1)
t.Errorf("Expected definition on line 1, got %d", locations[0].Range.Start.Line)
}
}
// TestHandleReferences checks find-references triggered from a node's
// declaration: with IncludeDeclaration set, the declaration plus both
// usages of MyObject must be returned.
func TestHandleReferences(t *testing.T) {
// Reset the package-global tree so earlier tests cannot leak state.
tree = index.NewProjectTree()
content := `
+MyObject = {
Class = Type
}
+RefObject = {
Class = Type
RefField = MyObject
}
+AnotherRef = {
Ref = MyObject
}
`
path := "/test.marte"
p := parser.NewParser(content)
config, err := p.Parse()
if err != nil {
t.Fatalf("Parse failed: %v", err)
}
tree.AddFile(path, config)
tree.ResolveReferences()
// Trigger find-references from the "+MyObject" declaration itself.
params := ReferenceParams{
TextDocument: TextDocumentIdentifier{URI: "file://" + path},
Position: Position{Line: 1, Character: 1}, // "+MyObject"
Context: ReferenceContext{IncludeDeclaration: true},
}
locations := handleReferences(params)
if len(locations) != 3 { // 1 declaration + 2 references
t.Fatalf("Expected 3 locations, got %d", len(locations))
}
}
// TestLSPFormatting checks that "textDocument/formatting" returns a single
// whole-document edit whose text is the formatter's normalized output
// (spaces around '=', consistent indentation).
func TestLSPFormatting(t *testing.T) {
// Unformatted input: missing spaces around '=' and ragged layout.
content := `
#package Proj.Main
+Object={
Field=1
}
`
uri := "file:///test.marte"
// Seed the in-memory document store directly instead of going through
// a didOpen message — handleFormatting reads from this map.
documents[uri] = content
params := DocumentFormattingParams{
TextDocument: TextDocumentIdentifier{URI: uri},
}
edits := handleFormatting(params)
if len(edits) != 1 {
t.Fatalf("Expected 1 edit, got %d", len(edits))
}
newText := edits[0].NewText
expected := `#package Proj.Main
+Object = {
Field = 1
}
`
// Normalize newlines for comparison just in case
if strings.TrimSpace(strings.ReplaceAll(newText, "\r\n", "\n")) != strings.TrimSpace(strings.ReplaceAll(expected, "\r\n", "\n")) {
t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", expected, newText)
}
}

BIN
mdt

Binary file not shown.

View File

@@ -30,16 +30,22 @@ The LSP server should provide the following capabilities:
- **Go to References**: Find usages of a node or field, supporting navigation across any file in the current project. - **Go to References**: Find usages of a node or field, supporting navigation across any file in the current project.
- **Code Completion**: Autocomplete fields, values, and references. - **Code Completion**: Autocomplete fields, values, and references.
- **Code Snippets**: Provide snippets for common patterns. - **Code Snippets**: Provide snippets for common patterns.
- **Formatting**: Format the document using the same rules and engine as the `fmt` command.
## Build System & File Structure ## Build System & File Structure
- **File Extension**: `.marte` - **File Extension**: `.marte`
- **Project Structure**: Files can be distributed across sub-folders. - **Project Structure**: Files can be distributed across sub-folders.
- **Namespaces**: The `#package` macro defines the namespace for the file. - **Namespaces**: The `#package` macro defines the namespace for the file.
- **Semantic**: `#package PROJECT.NODE` implies that all definitions within the file are treated as children/fields of the node `NODE`. - **Single File Context**: If no `#package` is defined in a file, the LSP, build tool, and validator must consider **only** that file (no project-wide merging or referencing).
- **Semantic**: `#package PROJECT_NAME.SUB_URI` implies that:
- `PROJECT_NAME` is a namespace identifier used to group files from the same project. It does **not** create a node in the configuration tree.
- `SUB_URI` defines the path of nodes where the file's definitions are placed. All definitions within the file are treated as children/fields of the node defined by `SUB_URI`.
- **URI Symbols**: The symbols `+` and `$` used for object nodes are **not** written in the URI of the `#package` macro (e.g., use `PROJECT.NODE` even if the node is defined as `+NODE`). - **URI Symbols**: The symbols `+` and `$` used for object nodes are **not** written in the URI of the `#package` macro (e.g., use `PROJECT.NODE` even if the node is defined as `+NODE`).
- **Build Process**: - **Build Process**:
- The build tool merges all files sharing the same base namespace. - The build tool merges all files sharing the same base namespace into a **single output configuration**.
- **Namespace Consistency**: The build tool must verify that all input files belong to the same project namespace (the first segment of the `#package` URI). If multiple project namespaces are detected, the build must fail with an error.
- **Target**: The build output is written to a single target file (e.g., provided via CLI or API).
- **Multi-File Definitions**: Nodes and objects can be defined across multiple files. The build tool, validator, and LSP must merge these definitions (including all fields and sub-nodes) from the entire project to create a unified view before processing or validating. - **Multi-File Definitions**: Nodes and objects can be defined across multiple files. The build tool, validator, and LSP must merge these definitions (including all fields and sub-nodes) from the entire project to create a unified view before processing or validating.
- **Global References**: References to nodes, signals, or objects can point to definitions located in any file within the project. - **Global References**: References to nodes, signals, or objects can point to definitions located in any file within the project.
- **Merging Order**: For objects defined across multiple files, the **first file** to be considered is the one containing the `Class` field definition. - **Merging Order**: For objects defined across multiple files, the **first file** to be considered is the one containing the `Class` field definition.
@@ -183,3 +189,8 @@ The LSP and `check` command should report the following:
- Missing mandatory fields. - Missing mandatory fields.
- Field type mismatches. - Field type mismatches.
- Grammar errors (e.g., missing closing brackets). - Grammar errors (e.g., missing closing brackets).
## Logging
- **Requirement**: All logs must be managed through a centralized logger.
- **Output**: Logs should be written to `stderr` by default to avoid interfering with `stdout` which might be used for CLI output (e.g., build artifacts or formatted text).

View File

@@ -0,0 +1,97 @@
package integration
import (
"io/ioutil"
"os"
"strings"
"testing"
"github.com/marte-dev/marte-dev-tools/internal/builder"
)
// TestMultiFileBuildMergeAndOrder verifies two build guarantees for an
// object split across files in the same #package:
//  1. all fields from both files appear in the single merged output;
//  2. the fragment containing the Class field (f2) is emitted first,
//     per the spec's "Merging Order" rule.
func TestMultiFileBuildMergeAndOrder(t *testing.T) {
// Fresh scratch directory for sources and output.
// NOTE(review): io/ioutil is deprecated since Go 1.16; prefer
// os.ReadFile/os.WriteFile (or t.TempDir for the directory) when the
// file's import block is next touched.
os.RemoveAll("build_multi_test")
os.MkdirAll("build_multi_test", 0755)
defer os.RemoveAll("build_multi_test")
// File 1 contributes FieldA but has no Class.
// File 2 contributes the Class and FieldB.
// Both target the same node via #package Proj.+MyObj.
f1Content := `
#package Proj.+MyObj
FieldA = 10
`
f2Content := `
#package Proj.+MyObj
Class = "MyClass"
FieldB = 20
`
ioutil.WriteFile("build_multi_test/f1.marte", []byte(f1Content), 0644)
ioutil.WriteFile("build_multi_test/f2.marte", []byte(f2Content), 0644)
// Build both sources into one output file (Build writes to the
// provided *os.File rather than choosing a path itself).
b := builder.NewBuilder([]string{"build_multi_test/f1.marte", "build_multi_test/f2.marte"})
outputFile := "build_multi_test/MyObj.marte"
f, err := os.Create(outputFile)
if err != nil {
t.Fatalf("Failed to create output file: %v", err)
}
defer f.Close()
err = b.Build(f)
if err != nil {
t.Fatalf("Build failed: %v", err)
}
f.Close() // Close to flush
// The merged output must exist and contain every field from both files.
if _, err := os.Stat(outputFile); os.IsNotExist(err) {
t.Fatalf("Expected output file not found")
}
content, err := ioutil.ReadFile(outputFile)
if err != nil {
t.Fatalf("Failed to read output: %v", err)
}
output := string(content)
if !strings.Contains(output, "Class = \"MyClass\"") {
t.Error("Output missing Class")
}
if !strings.Contains(output, "FieldA = 10") {
t.Error("Output missing FieldA")
}
if !strings.Contains(output, "FieldB = 20") {
t.Error("Output missing FieldB")
}
// Ordering: f2's fragment (Class, FieldB) must precede f1's (FieldA)
// because f2 holds the Class definition.
idxClass := strings.Index(output, "Class")
idxFieldB := strings.Index(output, "FieldB")
idxFieldA := strings.Index(output, "FieldA")
if idxClass == -1 || idxFieldB == -1 || idxFieldA == -1 {
t.Fatal("Missing fields in output")
}
if idxClass > idxFieldA {
t.Errorf("Expected Class (from f2) to be before FieldA (from f1). Output:\n%s", output)
}
if idxFieldB > idxFieldA {
t.Errorf("Expected FieldB (from f2) to be before FieldA (from f1). Output:\n%s", output)
}
}

View File

@@ -0,0 +1,6 @@
#package Proj.DupBase
+DupObj = {
Class = "DupClass"
FieldY = 1
}

View File

@@ -0,0 +1,3 @@
#package Proj.DupBase.DupObj
FieldY = 2

View File

@@ -0,0 +1,5 @@
#package Proj.Base
+MyObj = {
Class = "BaseClass"
}

View File

@@ -0,0 +1,3 @@
#package Proj.Base.MyObj
FieldX = 100

View File

@@ -0,0 +1,6 @@
#package Proj.TestPackage
+DupNode = {
Class = "DupClass"
FieldX = 1
}

View File

@@ -0,0 +1,5 @@
#package Proj.TestPackage
+DupNode = {
FieldX = 2
}

View File

@@ -0,0 +1,5 @@
#package Proj.TestPackage
+TargetNode = {
Class = "TargetClass"
}

View File

@@ -0,0 +1,6 @@
#package Proj.TestPackage
+SourceNode = {
Class = "SourceClass"
Target = TargetNode
}

View File

@@ -0,0 +1,5 @@
#package Proj.TestPackage
+MyNode = {
FieldA = 10
}

View File

@@ -0,0 +1,6 @@
#package Proj.TestPackage
+MyNode = {
Class = "MyClass"
FieldB = 20
}

View File

@@ -0,0 +1,196 @@
package integration
import (
"io/ioutil"
"strings"
"testing"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator"
)
// parseAndAddToIndex reads the file at filePath, parses it as a MARTe
// configuration, and registers the result in idx under filePath.
// Any read or parse failure aborts the calling test immediately.
func parseAndAddToIndex(t *testing.T, idx *index.ProjectTree, filePath string) {
	data, readErr := ioutil.ReadFile(filePath)
	if readErr != nil {
		t.Fatalf("Failed to read %s: %v", filePath, readErr)
	}
	cfg, parseErr := parser.NewParser(string(data)).Parse()
	if parseErr != nil {
		t.Fatalf("Parse failed for %s: %v", filePath, parseErr)
	}
	idx.AddFile(filePath, cfg)
}
// TestMultiFileNodeValidation checks that a +node split across two files is
// merged before validation: the Class field lives only in the second file,
// so a correct merge must not produce a missing-Class diagnostic.
func TestMultiFileNodeValidation(t *testing.T) {
	tree := index.NewProjectTree()
	parseAndAddToIndex(t, tree, "integration/multifile_valid_1.marte")
	parseAndAddToIndex(t, tree, "integration/multifile_valid_2.marte")

	val := validator.NewValidator(tree)
	val.ValidateProject()

	// +MyNode: valid_1 contributes FieldA; valid_2 contributes Class and
	// FieldB. With merging in place no missing-Class error may appear.
	for _, d := range val.Diagnostics {
		if strings.Contains(d.Message, "must contain a 'Class' field") {
			t.Errorf("Unexpected 'Class' field error for +MyNode: %s", d.Message)
		}
	}
}
// TestMultiFileDuplicateField checks that defining the same field (FieldX)
// for one node in two different files is reported as a duplicate.
func TestMultiFileDuplicateField(t *testing.T) {
	tree := index.NewProjectTree()
	parseAndAddToIndex(t, tree, "integration/multifile_dup_1.marte")
	parseAndAddToIndex(t, tree, "integration/multifile_dup_2.marte")

	val := validator.NewValidator(tree)
	val.ValidateProject()

	seen := false
	for _, d := range val.Diagnostics {
		if strings.Contains(d.Message, "Duplicate Field Definition") && strings.Contains(d.Message, "FieldX") {
			seen = true
			break
		}
	}
	if !seen {
		t.Errorf("Expected duplicate field error for FieldX in +DupNode, but found none")
	}
}
// TestMultiFileReference verifies cross-file reference resolution: +SourceNode
// (multifile_ref_2.marte) references TargetNode, which is defined in the
// sibling file multifile_ref_1.marte; after ResolveReferences the reference
// must carry a non-nil target.
//
// The previous version of this test ran the validator and then inspected
// nothing (`if len(v.Diagnostics) > 0 {}` with an empty body), so it could
// never fail. It now asserts the resolution outcome directly via the
// index's recorded references.
func TestMultiFileReference(t *testing.T) {
	idx := index.NewProjectTree()
	parseAndAddToIndex(t, idx, "integration/multifile_ref_1.marte")
	parseAndAddToIndex(t, idx, "integration/multifile_ref_2.marte")
	idx.ResolveReferences()

	// Locate the TargetNode reference recorded for multifile_ref_2.marte.
	var ref *index.Reference
	for i := range idx.References {
		if idx.References[i].File == "integration/multifile_ref_2.marte" && idx.References[i].Name == "TargetNode" {
			ref = &idx.References[i]
			break
		}
	}
	if ref == nil {
		t.Fatal("Reference TargetNode not found in index")
	}
	if ref.Target == nil {
		t.Error("Expected cross-file reference TargetNode to be resolved, but it is unresolved")
	}
}
// TestHierarchicalPackageMerge checks that a node declared in a parent
// package file (+MyObj with Class) is merged with fields contributed by a
// child package file (FieldX), both at validation time and in the tree.
func TestHierarchicalPackageMerge(t *testing.T) {
	tree := index.NewProjectTree()
	parseAndAddToIndex(t, tree, "integration/hierarchical_pkg_1.marte")
	parseAndAddToIndex(t, tree, "integration/hierarchical_pkg_2.marte")

	val := validator.NewValidator(tree)
	val.ValidateProject()

	// +MyObj gets Class from file 1 and FieldX from file 2; a correct merge
	// must not trigger a missing-Class diagnostic.
	for _, d := range val.Diagnostics {
		if strings.Contains(d.Message, "must contain a 'Class' field") {
			t.Errorf("Unexpected 'Class' field error for +MyObj: %s", d.Message)
		}
	}

	// Walk the tree directly to confirm FieldX actually landed in +MyObj.
	base := tree.Root.Children["Base"]
	if base == nil {
		t.Fatal("Base node not found")
	}
	obj := base.Children["MyObj"]
	if obj == nil {
		t.Fatal("MyObj node not found in Base")
	}
	found := false
	for _, frag := range obj.Fragments {
		for _, def := range frag.Definitions {
			if field, ok := def.(*parser.Field); ok && field.Name == "FieldX" {
				found = true
			}
		}
	}
	if !found {
		t.Error("FieldX not found in +MyObj")
	}
}
// TestHierarchicalDuplicate checks that a field (FieldY) defined both in a
// parent package file and in a child package file for the same node is
// flagged as a duplicate definition.
func TestHierarchicalDuplicate(t *testing.T) {
	tree := index.NewProjectTree()
	parseAndAddToIndex(t, tree, "integration/hierarchical_dup_1.marte")
	parseAndAddToIndex(t, tree, "integration/hierarchical_dup_2.marte")

	val := validator.NewValidator(tree)
	val.ValidateProject()

	seen := false
	for _, d := range val.Diagnostics {
		if strings.Contains(d.Message, "Duplicate Field Definition") && strings.Contains(d.Message, "FieldY") {
			seen = true
			break
		}
	}
	if !seen {
		t.Errorf("Expected duplicate field error for FieldY in +DupObj (hierarchical), but found none")
	}
}
// TestIsolatedFileValidation ensures that a file without a #package
// declaration is isolated: its references must not resolve against nodes
// defined in packaged files.
//
// Fix over the previous version: Parse errors were silently discarded
// (`c1, _ := p1.Parse()`), so a parser regression would have corrupted the
// test's premise without any signal. Parse errors now abort the test.
func TestIsolatedFileValidation(t *testing.T) {
	idx := index.NewProjectTree()

	// File 1: has a package. Defines SharedObj.
	f1Content := `
#package Proj.Pkg
+SharedObj = { Class = SharedClass }
`
	c1, err := parser.NewParser(f1Content).Parse()
	if err != nil {
		t.Fatalf("Parse failed for shared.marte: %v", err)
	}
	idx.AddFile("shared.marte", c1)

	// File 2: no package. References SharedObj. It must NOT resolve to the
	// SharedObj in shared.marte because iso.marte is isolated.
	f2Content := `
+IsoObj = {
Class = "MyClass"
Ref = SharedObj
}
`
	c2, err := parser.NewParser(f2Content).Parse()
	if err != nil {
		t.Fatalf("Parse failed for iso.marte: %v", err)
	}
	idx.AddFile("iso.marte", c2)

	idx.ResolveReferences()

	// Locate the SharedObj reference recorded for iso.marte.
	var ref *index.Reference
	for i := range idx.References {
		if idx.References[i].File == "iso.marte" && idx.References[i].Name == "SharedObj" {
			ref = &idx.References[i]
			break
		}
	}
	if ref == nil {
		t.Fatal("Reference SharedObj not found in index")
	}
	if ref.Target != nil {
		t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File)
	}
}

View File

@@ -0,0 +1,102 @@
package integration
import (
"testing"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator"
)
// TestUnusedGAM verifies that a GAM never listed in any thread's Functions
// list (+MyGAM) yields an "unused" diagnostic, while the referenced +UsedGAM
// does not.
func TestUnusedGAM(t *testing.T) {
	content := `
+MyGAM = {
Class = GAMClass
+InputSignals = {}
}
+UsedGAM = {
Class = GAMClass
+InputSignals = {}
}
$App = {
$Data = {}
$States = {
$State = {
$Threads = {
$Thread = {
Functions = { UsedGAM }
}
}
}
}
}
`
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}

	tree := index.NewProjectTree()
	tree.AddFile("test.marte", cfg)
	tree.ResolveReferences()

	val := validator.NewValidator(tree)
	val.CheckUnused()

	const want = "Unused GAM: +MyGAM is defined but not referenced in any thread or scheduler"
	seen := false
	for _, d := range val.Diagnostics {
		if d.Message == want {
			seen = true
			break
		}
	}
	if !seen {
		t.Error("Expected warning for unused GAM +MyGAM, but found none")
	}
}
// TestUnusedSignal verifies that a DataSource signal never referenced by any
// GAM (Sig2 in +MyDS) yields an "unused signal" diagnostic, while the
// aliased Sig1 does not.
func TestUnusedSignal(t *testing.T) {
	content := `
$App = {
$Data = {
+MyDS = {
Class = DataSourceClass
Sig1 = { Type = uint32 }
Sig2 = { Type = uint32 }
}
}
}
+MyGAM = {
Class = GAMClass
+InputSignals = {
S1 = { DataSource = MyDS Alias = Sig1 }
}
}
`
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}

	tree := index.NewProjectTree()
	tree.AddFile("test.marte", cfg)
	tree.ResolveReferences()

	val := validator.NewValidator(tree)
	val.CheckUnused()

	const want = "Unused Signal: Sig2 is defined in DataSource +MyDS but never referenced"
	seen := false
	for _, d := range val.Diagnostics {
		if d.Message == want {
			seen = true
			break
		}
	}
	if !seen {
		t.Error("Expected warning for unused signal Sig2, but found none")
	}
}