Improved lsp + builder + using logger

This commit is contained in:
Martino Ferrari
2026-01-21 14:35:30 +01:00
parent d4d857bf05
commit f3c13fca55
21 changed files with 891 additions and 170 deletions

View File

@@ -2,9 +2,7 @@ package builder
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strings"
@@ -20,12 +18,15 @@ func NewBuilder(files []string) *Builder {
return &Builder{Files: files}
}
func (b *Builder) Build(outputDir string) error {
func (b *Builder) Build(f *os.File) error {
// Build the Project Tree
tree := index.NewProjectTree()
var expectedProject string
var projectSet bool
for _, file := range b.Files {
content, err := ioutil.ReadFile(file)
content, err := os.ReadFile(file)
if err != nil {
return err
}
@@ -35,83 +36,29 @@ func (b *Builder) Build(outputDir string) error {
if err != nil {
return fmt.Errorf("error parsing %s: %v", file, err)
}
tree.AddFile(file, config)
// Check Namespace/Project Consistency
proj := ""
if config.Package != nil {
parts := strings.Split(config.Package.URI, ".")
if len(parts) > 0 {
proj = strings.TrimSpace(parts[0])
}
}
if !projectSet {
expectedProject = proj
projectSet = true
} else if proj != expectedProject {
return fmt.Errorf("multiple namespaces defined in sources: found '%s' and '%s'", expectedProject, proj)
}
tree.AddFile(file, config)
}
// Iterate over top-level children of the root (Packages)
// Spec says: "merges all files sharing the same base namespace"
// So if we have #package A.B and #package A.C, they define A.
// We should output A.marte? Or A/B.marte?
// Usually MARTe projects output one file per "Root Object" or as specified.
// The prompt says: "Output format is the same as input ... without #package".
// "Build tool merges all files sharing the same base namespace into a single output."
// If files have:
// File1: #package App
// File2: #package App
// Output: App.marte
// If File3: #package Other
// Output: Other.marte
// So we iterate Root.Children.
for name, node := range tree.Root.Children {
outputPath := filepath.Join(outputDir, name+".marte")
f, err := os.Create(outputPath)
if err != nil {
return err
}
defer f.Close()
// Write node content
// Top level node in tree corresponds to the "Base Namespace" name?
// e.g. #package App.Sub -> Root->App->Sub.
// If we output App.marte, we should generate "+App = { ... }"
// But wait. Input: #package App.
// +Node = ...
// Output: +Node = ...
// If Input: #package App.
// +App = ... (Recursive?)
// MARTe config is usually a list of definitions.
// If #package App, and we generate App.marte.
// Does App.marte contain "App = { ... }"?
// Or does it contain the CONTENT of App?
// "Output format is the same as input configuration but without the #package macro"
// Input: #package App \n +Node = {}
// Output: +Node = {}
// So we are printing the CHILDREN of the "Base Namespace".
// But wait, "Base Namespace" could be complex "A.B".
// "Merges files with the same base namespace".
// Assuming base namespace is the first segment? or the whole match?
// Let's assume we output one file per top-level child of Root.
// And we print that Child as an Object.
// Actually, if I have:
// #package App
// +Node = {}
// Tree: Root -> App -> Node.
// If I generate App.marte.
// Should it look like:
// +Node = {}
// Or
// +App = { +Node = {} }?
// If "without #package macro", it implies we are expanding the package into structure?
// Or just removing the line?
// If I remove #package App, and keep +Node={}, then +Node is at root.
// But originally it was at App.Node.
// So preserving semantics means wrapping it in +App = { ... }.
b.writeNodeContent(f, node, 0)
}
// Write entire root content (definitions and children) to the single output file
b.writeNodeContent(f, tree.Root, 0)
return nil
}
@@ -120,98 +67,98 @@ func (b *Builder) writeNodeContent(f *os.File, node *index.ProjectNode, indent i
sort.SliceStable(node.Fragments, func(i, j int) bool {
return hasClass(node.Fragments[i]) && !hasClass(node.Fragments[j])
})
indentStr := strings.Repeat(" ", indent)
// If this node has a RealName (e.g. +App), we print it as an object definition
// UNLESS it is the top-level output file itself?
// UNLESS it is the top-level output file itself?
// If we are writing "App.marte", maybe we are writing the *body* of App?
// Spec: "unifying multi-file project into a single configuration output"
// Let's assume we print the Node itself.
if node.RealName != "" {
fmt.Fprintf(f, "%s%s = {\n", indentStr, node.RealName)
indent++
indentStr = strings.Repeat(" ", indent)
}
// 2. Write definitions from fragments
for _, frag := range node.Fragments {
// Use formatter logic to print definitions
// We need a temporary Config to use Formatter?
// We need a temporary Config to use Formatter?
// Or just reimplement basic printing? Formatter is better.
// But Formatter prints to io.Writer.
// We can reuse formatDefinition logic if we exposed it, or just copy basic logic.
// Since we need to respect indentation, using Formatter.Format might be tricky
// Since we need to respect indentation, using Formatter.Format might be tricky
// unless we wrap definitions in a dummy structure.
for _, def := range frag.Definitions {
// Basic formatting for now, referencing formatter style
b.writeDefinition(f, def, indent)
}
}
// 3. Write Children (recursively)
// Children are sub-nodes defined implicitly via #package A.B or explicitly +Sub
// Explicit +Sub are handled via Fragments logic (they are definitions in fragments).
// Implicit nodes (from #package A.B.C where B was never explicitly defined)
// show up in Children map but maybe not in Fragments?
// If a Child is NOT in fragments (implicit), we still need to write it.
// If it IS in fragments (explicit +Child), it was handled in loop above?
// Wait. My Indexer puts `+Sub` into `node.Children["Sub"]` AND adds a `Fragment` to `node` containing `+Sub` object?
// Let's check Indexer.
// Case ObjectNode:
// Adds Fragment to `child` (the Sub node).
// Does NOT add `ObjectNode` definition to `node`'s fragment list?
// "pt.addObjectFragment(child...)"
// It does NOT add to `fileFragment.Definitions`.
// So `node.Fragments` only contains Fields!
// Children are all in `node.Children`.
// So:
// 1. Write Fields (from Fragments).
// 2. Write Children (from Children map).
// But wait, Fragments might have order?
// "Relative ordering within a file is preserved."
// My Indexer splits Fields and Objects.
// Fields go to Fragments. Objects go to Children.
// This loses the relative order between Fields and Objects in the source file!
// Correct Indexer approach for preserving order:
// `Fragment` should contain a list of `Entry`.
// `Entry` can be `Field` OR `ChildNodeName`.
// But I just rewrote Indexer to split them.
// If strict order is required "within a file", my Indexer is slightly lossy regarding Field vs Object order.
// Spec: "Relative ordering within a file is preserved."
// To fix this without another full rewrite:
// Iterating `node.Children` alphabetically is arbitrary.
// We should ideally iterate them in the order they appear.
// For now, I will proceed with writing Children after Fields, which is a common convention,
// unless strict interleaving is required.
// For now, I will proceed with writing Children after Fields, which is a common convention,
// unless strict interleaving is required.
// Given "Class first" rule, reordering happens anyway.
// Sorting Children?
// Maybe keep a list of OrderedChildren in ProjectNode?
sortedChildren := make([]string, 0, len(node.Children))
for k := range node.Children {
sortedChildren = append(sortedChildren, k)
}
sort.Strings(sortedChildren) // Alphabetical for determinism
for _, k := range sortedChildren {
child := node.Children[k]
b.writeNodeContent(f, child, indent)
}
if node.RealName != "" {
indent--
indentStr = strings.Repeat(" ", indent)
@@ -260,4 +207,4 @@ func hasClass(frag *index.Fragment) bool {
}
}
return false
}
}

View File

@@ -1,17 +1,18 @@
package index
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/marte-dev/marte-dev-tools/internal/logger"
"github.com/marte-dev/marte-dev-tools/internal/parser"
)
type ProjectTree struct {
Root *ProjectNode
References []Reference
Root *ProjectNode
References []Reference
IsolatedFiles map[string]*ProjectNode
}
func (pt *ProjectTree) ScanDirectory(rootPath string) error {
@@ -65,6 +66,7 @@ func NewProjectTree() *ProjectTree {
Children: make(map[string]*ProjectNode),
Metadata: make(map[string]string),
},
IsolatedFiles: make(map[string]*ProjectNode),
}
}
@@ -84,6 +86,7 @@ func (pt *ProjectTree) RemoveFile(file string) {
}
pt.References = newRefs
delete(pt.IsolatedFiles, file)
pt.removeFileFromNode(pt.Root, file)
}
@@ -151,27 +154,45 @@ func (pt *ProjectTree) extractFieldMetadata(node *ProjectNode, f *parser.Field)
func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) {
pt.RemoveFile(file)
node := pt.Root
if config.Package != nil {
parts := strings.Split(config.Package.URI, ".")
for _, part := range parts {
part = strings.TrimSpace(part)
if part == "" {
continue
}
if _, ok := node.Children[part]; !ok {
node.Children[part] = &ProjectNode{
Name: part,
RealName: part,
Children: make(map[string]*ProjectNode),
Parent: node,
Metadata: make(map[string]string),
}
}
node = node.Children[part]
if config.Package == nil {
node := &ProjectNode{
Children: make(map[string]*ProjectNode),
Metadata: make(map[string]string),
}
pt.IsolatedFiles[file] = node
pt.populateNode(node, file, config)
return
}
node := pt.Root
parts := strings.Split(config.Package.URI, ".")
// Skip first part as per spec (Project Name is namespace only)
startIdx := 0
if len(parts) > 0 {
startIdx = 1
}
for i := startIdx; i < len(parts); i++ {
part := strings.TrimSpace(parts[i])
if part == "" {
continue
}
if _, ok := node.Children[part]; !ok {
node.Children[part] = &ProjectNode{
Name: part,
RealName: part,
Children: make(map[string]*ProjectNode),
Parent: node,
Metadata: make(map[string]string),
}
}
node = node.Children[part]
}
pt.populateNode(node, file, config)
}
func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *parser.Configuration) {
fileFragment := &Fragment{
File: file,
IsObject: false,
@@ -184,7 +205,6 @@ func (pt *ProjectTree) AddFile(file string, config *parser.Configuration) {
case *parser.Field:
fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.indexValue(file, d.Value)
// Metadata update not really relevant for package node usually, but consistency
case *parser.ObjectNode:
norm := NormalizeName(d.Name)
if _, ok := node.Children[norm]; !ok {
@@ -319,7 +339,11 @@ func (pt *ProjectTree) indexValue(file string, val parser.Value) {
func (pt *ProjectTree) ResolveReferences() {
for i := range pt.References {
ref := &pt.References[i]
ref.Target = pt.findNode(pt.Root, ref.Name)
if isoNode, ok := pt.IsolatedFiles[ref.File]; ok {
ref.Target = pt.findNode(isoNode, ref.Name)
} else {
ref.Target = pt.findNode(pt.Root, ref.Name)
}
}
}
@@ -342,9 +366,9 @@ type QueryResult struct {
}
func (pt *ProjectTree) Query(file string, line, col int) *QueryResult {
fmt.Fprintf(os.Stderr, "File: %s:%d:%d\n", file, line, col)
logger.Printf("File: %s:%d:%d", file, line, col)
for i := range pt.References {
fmt.Fprintf(os.Stderr, "%s\n", pt.Root.Name)
logger.Printf("%s", pt.Root.Name)
ref := &pt.References[i]
if ref.File == file {
if line == ref.Position.Line && col >= ref.Position.Column && col < ref.Position.Column+len(ref.Name) {
@@ -353,6 +377,10 @@ func (pt *ProjectTree) Query(file string, line, col int) *QueryResult {
}
}
if isoNode, ok := pt.IsolatedFiles[file]; ok {
return pt.queryNode(isoNode, file, line, col)
}
return pt.queryNode(pt.Root, file, line, col)
}

27
internal/logger/logger.go Normal file
View File

@@ -0,0 +1,27 @@
package logger
import (
"log"
"os"
)
var (
// std is the package-wide logger shared by every helper below.
// It writes to stderr (not stdout), tagged with the "[mdt] " prefix and
// the standard date/time flags.
std = log.New(os.Stderr, "[mdt] ", log.LstdFlags)
)

// Printf logs a formatted message to the package logger, like log.Printf.
func Printf(format string, v ...interface{}) {
std.Printf(format, v...)
}

// Println logs its arguments to the package logger, like log.Println.
func Println(v ...interface{}) {
std.Println(v...)
}

// Fatal logs its arguments and then terminates the process (log.Fatal
// calls os.Exit(1), so deferred functions do not run).
func Fatal(v ...interface{}) {
std.Fatal(v...)
}

// Fatalf logs a formatted message and then terminates the process, like
// log.Fatalf (os.Exit(1); deferred functions do not run).
func Fatalf(format string, v ...interface{}) {
std.Fatalf(format, v...)
}

View File

@@ -2,13 +2,16 @@ package lsp
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"os"
"strings"
"github.com/marte-dev/marte-dev-tools/internal/formatter"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/logger"
"github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator"
)
@@ -117,7 +120,23 @@ type LSPDiagnostic struct {
Source string `json:"source"`
}
type DocumentFormattingParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"`
Options FormattingOptions `json:"options"`
}
type FormattingOptions struct {
TabSize int `json:"tabSize"`
InsertSpaces bool `json:"insertSpaces"`
}
type TextEdit struct {
Range Range `json:"range"`
NewText string `json:"newText"`
}
var tree = index.NewProjectTree()
var documents = make(map[string]string)
func RunServer() {
reader := bufio.NewReader(os.Stdin)
@@ -127,7 +146,7 @@ func RunServer() {
if err == io.EOF {
break
}
fmt.Fprintf(os.Stderr, "Error reading message: %v\n", err)
logger.Printf("Error reading message: %v\n", err)
continue
}
@@ -174,7 +193,7 @@ func handleMessage(msg *JsonRpcMessage) {
}
if root != "" {
fmt.Fprintf(os.Stderr, "Scanning workspace: %s\n", root)
logger.Printf("Scanning workspace: %s\n", root)
tree.ScanDirectory(root)
tree.ResolveReferences()
}
@@ -182,10 +201,11 @@ func handleMessage(msg *JsonRpcMessage) {
respond(msg.ID, map[string]any{
"capabilities": map[string]any{
"textDocumentSync": 1, // Full sync
"hoverProvider": true,
"definitionProvider": true,
"referencesProvider": true,
"textDocumentSync": 1, // Full sync
"hoverProvider": true,
"definitionProvider": true,
"referencesProvider": true,
"documentFormattingProvider": true,
},
})
case "initialized":
@@ -207,16 +227,16 @@ func handleMessage(msg *JsonRpcMessage) {
case "textDocument/hover":
var params HoverParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
fmt.Fprintf(os.Stderr, "Hover: %s:%d\n", params.TextDocument.URI, params.Position.Line)
logger.Printf("Hover: %s:%d", params.TextDocument.URI, params.Position.Line)
res := handleHover(params)
if res != nil {
fmt.Fprintf(os.Stderr, "Res: %v\n", res.Contents)
logger.Printf("Res: %v", res.Contents)
} else {
fmt.Fprint(os.Stderr, "Res: NIL\n")
logger.Printf("Res: NIL")
}
respond(msg.ID, res)
} else {
fmt.Fprint(os.Stderr, "not recovered hover parameters\n")
logger.Printf("not recovered hover parameters")
respond(msg.ID, nil)
}
case "textDocument/definition":
@@ -229,6 +249,11 @@ func handleMessage(msg *JsonRpcMessage) {
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleReferences(params))
}
case "textDocument/formatting":
var params DocumentFormattingParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleFormatting(params))
}
}
}
@@ -238,6 +263,7 @@ func uriToPath(uri string) string {
func handleDidOpen(params DidOpenTextDocumentParams) {
path := uriToPath(params.TextDocument.URI)
documents[params.TextDocument.URI] = params.TextDocument.Text
p := parser.NewParser(params.TextDocument.Text)
config, err := p.Parse()
if err == nil {
@@ -252,6 +278,7 @@ func handleDidChange(params DidChangeTextDocumentParams) {
return
}
text := params.ContentChanges[0].Text
documents[params.TextDocument.URI] = text
path := uriToPath(params.TextDocument.URI)
p := parser.NewParser(text)
config, err := p.Parse()
@@ -262,6 +289,39 @@ func handleDidChange(params DidChangeTextDocumentParams) {
}
}
// handleFormatting serves a textDocument/formatting request. It re-parses the
// cached in-memory copy of the document and, on success, returns a single
// TextEdit that replaces the entire document with the formatter's canonical
// output. It returns nil (no edits) when the document is unknown to the
// server or does not parse cleanly — never reformat broken input.
func handleFormatting(params DocumentFormattingParams) []TextEdit {
uri := params.TextDocument.URI
// Use the text tracked by didOpen/didChange, not the on-disk file, so we
// format what the editor currently shows.
text, ok := documents[uri]
if !ok {
return nil
}
p := parser.NewParser(text)
config, err := p.Parse()
if err != nil {
return nil
}
var buf bytes.Buffer
formatter.Format(config, &buf)
newText := buf.String()
// Count the document's lines so the edit range covers all of it; a file
// not ending in a newline still has a final (uncounted) line.
lines := strings.Count(text, "\n")
if len(text) > 0 && !strings.HasSuffix(text, "\n") {
lines++
}
return []TextEdit{
{
Range: Range{
Start: Position{0, 0},
// NOTE(review): {lines + 1, 0} overshoots the last line by one;
// LSP clients clamp out-of-range positions to the document end,
// but {lines, 0} would already cover the whole file — confirm.
End: Position{lines + 1, 0},
},
NewText: newText,
},
}
}
func runValidation(uri string) {
v := validator.NewValidator(tree)
v.ValidateProject()
@@ -337,7 +397,7 @@ func handleHover(params HoverParams) *Hover {
res := tree.Query(path, line, col)
if res == nil {
fmt.Fprint(os.Stderr, "No object/node/reference found\n")
logger.Printf("No object/node/reference found")
return nil
}

210
internal/lsp/server_test.go Normal file
View File

@@ -0,0 +1,210 @@
package lsp
import (
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
)
// TestInitProjectScan exercises the full "initialize" flow end to end: the
// server scans a workspace directory on initialize, indexes both files into
// the global tree, and can then resolve go-to-definition across files.
func TestInitProjectScan(t *testing.T) {
// 1. Setup temp dir with files
tmpDir, err := os.MkdirTemp("", "lsp_test")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(tmpDir)
// File 1: defines +Target inside package Test.Common.
if err := os.WriteFile(filepath.Join(tmpDir, "def.marte"), []byte("#package Test.Common\n+Target = { Class = C }"), 0644); err != nil {
t.Fatal(err)
}
// File 2: references Target via "Link = Target". The "#package" directive
// occupies 0-based line 0, so the reference sits on 0-based line 1, and
// "Target" starts at 0-based character 29 of that line.
if err := os.WriteFile(filepath.Join(tmpDir, "ref.marte"), []byte("#package Test.Common\n+Source = { Class = C Link = Target }"), 0644); err != nil {
t.Fatal(err)
}
// 2. Initialize
tree = index.NewProjectTree() // Reset global tree
initParams := InitializeParams{RootPath: tmpDir}
paramsBytes, _ := json.Marshal(initParams)
msg := &JsonRpcMessage{
Method: "initialize",
Params: paramsBytes,
ID: 1,
}
handleMessage(msg)
// 3. Query go-to-definition at the "Target" reference in ref.marte
// (line 1, character 29 — see the offset note above).
defParams := DefinitionParams{
TextDocument: TextDocumentIdentifier{URI: "file://" + filepath.Join(tmpDir, "ref.marte")},
Position: Position{Line: 1, Character: 29},
}
res := handleDefinition(defParams)
if res == nil {
t.Fatal("Definition not found via LSP after initialization")
}
locs, ok := res.([]Location)
if !ok {
t.Fatalf("Expected []Location, got %T", res)
}
if len(locs) == 0 {
t.Fatal("No locations found")
}
// The definition must resolve to the other file, def.marte.
expectedURI := "file://" + filepath.Join(tmpDir, "def.marte")
if locs[0].URI != expectedURI {
t.Errorf("Expected URI %s, got %s", expectedURI, locs[0].URI)
}
}
// TestHandleDefinition checks that go-to-definition on a reference inside a
// single in-memory document resolves to the line where the object is defined.
func TestHandleDefinition(t *testing.T) {
	// Start from a clean global index so earlier tests cannot interfere.
	tree = index.NewProjectTree()
	src := `
+MyObject = {
Class = Type
}
+RefObject = {
Class = Type
RefField = MyObject
}
`
	docPath := "/test.marte"
	cfg, err := parser.NewParser(src).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	tree.AddFile(docPath, cfg)
	tree.ResolveReferences()
	t.Logf("Refs: %d", len(tree.References))
	for _, r := range tree.References {
		t.Logf(" %s at %d:%d", r.Name, r.Position.Line, r.Position.Column)
	}
	// Request the definition of "MyObject" from its usage in RefField.
	req := DefinitionParams{
		TextDocument: TextDocumentIdentifier{URI: "file://" + docPath},
		Position:     Position{Line: 6, Character: 15}, // "MyObject" in RefField = MyObject
	}
	res := handleDefinition(req)
	if res == nil {
		t.Fatal("handleDefinition returned nil")
	}
	locs, ok := res.([]Location)
	if !ok {
		t.Fatalf("Expected []Location, got %T", res)
	}
	if len(locs) != 1 {
		t.Fatalf("Expected 1 location, got %d", len(locs))
	}
	// +MyObject is declared on source line 2, i.e. 0-indexed line 1.
	if locs[0].Range.Start.Line != 1 {
		t.Errorf("Expected definition on line 1, got %d", locs[0].Range.Start.Line)
	}
}
// TestHandleReferences checks that find-references triggered from an object's
// declaration returns the declaration itself plus every usage site.
func TestHandleReferences(t *testing.T) {
	// Reset the shared index before populating it for this test.
	tree = index.NewProjectTree()
	src := `
+MyObject = {
Class = Type
}
+RefObject = {
Class = Type
RefField = MyObject
}
+AnotherRef = {
Ref = MyObject
}
`
	docPath := "/test.marte"
	cfg, err := parser.NewParser(src).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	tree.AddFile(docPath, cfg)
	tree.ResolveReferences()
	// Ask from the "+MyObject" declaration, with the declaration included
	// in the result set.
	req := ReferenceParams{
		TextDocument: TextDocumentIdentifier{URI: "file://" + docPath},
		Position:     Position{Line: 1, Character: 1}, // "+MyObject"
		Context:      ReferenceContext{IncludeDeclaration: true},
	}
	if got := handleReferences(req); len(got) != 3 { // 1 declaration + 2 references
		t.Fatalf("Expected 3 locations, got %d", len(got))
	}
}
// TestLSPFormatting checks that a textDocument/formatting request rewrites the
// cached document into the formatter's canonical style (spaces around "=",
// package directive preserved).
func TestLSPFormatting(t *testing.T) {
	raw := `
#package Proj.Main
+Object={
Field=1
}
`
	docURI := "file:///test.marte"
	// Seed the in-memory document store directly, as didOpen would.
	documents[docURI] = raw
	edits := handleFormatting(DocumentFormattingParams{
		TextDocument: TextDocumentIdentifier{URI: docURI},
	})
	if len(edits) != 1 {
		t.Fatalf("Expected 1 edit, got %d", len(edits))
	}
	got := edits[0].NewText
	want := `#package Proj.Main
+Object = {
Field = 1
}
`
	// Compare modulo surrounding whitespace and CRLF/LF differences.
	normalize := func(s string) string {
		return strings.TrimSpace(strings.ReplaceAll(s, "\r\n", "\n"))
	}
	if normalize(got) != normalize(want) {
		t.Errorf("Formatting mismatch.\nExpected:\n%s\nGot:\n%s", want, got)
	}
}