Better multi-file support

This commit is contained in:
Martino Ferrari
2026-01-21 10:16:54 +01:00
parent fe4bb7c11e
commit d4d857bf05
7 changed files with 400 additions and 30 deletions

View File

@@ -3,6 +3,7 @@ package index
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/marte-dev/marte-dev-tools/internal/parser"
@@ -13,6 +14,26 @@ type ProjectTree struct {
References []Reference
}
// ScanDirectory walks rootPath recursively and indexes every ".marte" file
// it finds. Files that cannot be read are logged to stderr and skipped, so a
// single unreadable file does not abort the whole workspace scan; files that
// fail to parse are silently ignored (they will be re-parsed on didOpen).
func (pt *ProjectTree) ScanDirectory(rootPath string) error {
	return filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Propagate walk errors (e.g. rootPath itself is inaccessible).
			return err
		}
		if info.IsDir() || !strings.HasSuffix(info.Name(), ".marte") {
			return nil
		}
		content, err := os.ReadFile(path)
		if err != nil {
			// Log and continue: one unreadable file must not stop indexing.
			fmt.Fprintf(os.Stderr, "skipping %s: %v\n", path, err)
			return nil
		}
		p := parser.NewParser(string(content))
		if config, err := p.Parse(); err == nil {
			pt.AddFile(path, config)
		}
		return nil
	})
}
type Reference struct {
Name string
Position parser.Position

View File

@@ -10,6 +10,7 @@ import (
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator"
)
type JsonRpcMessage struct {
@@ -26,6 +27,11 @@ type JsonRpcError struct {
Message string `json:"message"`
}
type InitializeParams struct {
RootURI string `json:"rootUri"`
RootPath string `json:"rootPath"`
}
type DidOpenTextDocumentParams struct {
TextDocument TextDocumentItem `json:"textDocument"`
}
@@ -60,6 +66,31 @@ type TextDocumentIdentifier struct {
URI string `json:"uri"`
}
// DefinitionParams carries the arguments of a "textDocument/definition" request.
type DefinitionParams struct {
	TextDocument TextDocumentIdentifier `json:"textDocument"`
	Position     Position               `json:"position"`
}

// ReferenceParams carries the arguments of a "textDocument/references" request.
type ReferenceParams struct {
	TextDocument TextDocumentIdentifier `json:"textDocument"`
	Position     Position               `json:"position"`
	Context      ReferenceContext       `json:"context"`
}

// ReferenceContext mirrors the LSP ReferenceContext structure.
type ReferenceContext struct {
	// IncludeDeclaration asks the server to also return declaration sites.
	IncludeDeclaration bool `json:"includeDeclaration"`
}

// Location identifies a text range inside a document, as defined by LSP.
type Location struct {
	URI   string `json:"uri"`
	Range Range  `json:"range"`
}

// Range is a text span between Start and End in zero-based LSP coordinates.
type Range struct {
	Start Position `json:"start"`
	End   Position `json:"end"`
}
type Position struct {
Line int `json:"line"`
Character int `json:"character"`
@@ -74,6 +105,18 @@ type MarkupContent struct {
Value string `json:"value"`
}
type PublishDiagnosticsParams struct {
URI string `json:"uri"`
Diagnostics []LSPDiagnostic `json:"diagnostics"`
}
type LSPDiagnostic struct {
Range Range `json:"range"`
Severity int `json:"severity"`
Message string `json:"message"`
Source string `json:"source"`
}
var tree = index.NewProjectTree()
func RunServer() {
@@ -121,6 +164,22 @@ func readMessage(reader *bufio.Reader) (*JsonRpcMessage, error) {
func handleMessage(msg *JsonRpcMessage) {
switch msg.Method {
case "initialize":
var params InitializeParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
root := ""
if params.RootURI != "" {
root = uriToPath(params.RootURI)
} else if params.RootPath != "" {
root = params.RootPath
}
if root != "" {
fmt.Fprintf(os.Stderr, "Scanning workspace: %s\n", root)
tree.ScanDirectory(root)
tree.ResolveReferences()
}
}
respond(msg.ID, map[string]any{
"capabilities": map[string]any{
"textDocumentSync": 1, // Full sync
@@ -130,7 +189,7 @@ func handleMessage(msg *JsonRpcMessage) {
},
})
case "initialized":
// Do nothing
runValidation("")
case "shutdown":
respond(msg.ID, nil)
case "exit":
@@ -160,6 +219,16 @@ func handleMessage(msg *JsonRpcMessage) {
fmt.Fprint(os.Stderr, "not recovered hover parameters\n")
respond(msg.ID, nil)
}
case "textDocument/definition":
var params DefinitionParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleDefinition(params))
}
case "textDocument/references":
var params ReferenceParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleReferences(params))
}
}
}
@@ -174,6 +243,7 @@ func handleDidOpen(params DidOpenTextDocumentParams) {
if err == nil {
tree.AddFile(path, config)
tree.ResolveReferences()
runValidation(params.TextDocument.URI)
}
}
@@ -188,9 +258,78 @@ func handleDidChange(params DidChangeTextDocumentParams) {
if err == nil {
tree.AddFile(path, config)
tree.ResolveReferences()
runValidation(params.TextDocument.URI)
}
}
// runValidation re-validates the whole project tree and publishes the
// resulting diagnostics, one notification per known file. Files with no
// findings receive an empty diagnostics list so stale squiggles get cleared.
// The uri argument is currently unused; validation always covers the project.
func runValidation(uri string) {
	v := validator.NewValidator(tree)
	v.ValidateProject()
	v.CheckUnused()

	// Seed every known file with an empty list so previously published
	// diagnostics are cleared once the underlying problem is fixed.
	known := make(map[string]bool)
	collectFiles(tree.Root, known)
	byFile := make(map[string][]LSPDiagnostic, len(known))
	for f := range known {
		byFile[f] = []LSPDiagnostic{}
	}

	for _, d := range v.Diagnostics {
		if d.File == "" {
			continue
		}
		sev := 1 // LSP Error
		if d.Level == validator.LevelWarning {
			sev = 2 // LSP Warning
		}
		start := Position{Line: d.Position.Line - 1, Character: d.Position.Column - 1}
		byFile[d.File] = append(byFile[d.File], LSPDiagnostic{
			Range: Range{
				Start: start,
				// The validator carries no token length, so highlight a fixed 10 chars.
				End: Position{Line: start.Line, Character: start.Character + 10},
			},
			Severity: sev,
			Message:  d.Message,
			Source:   "mdt",
		})
	}

	for path, diags := range byFile {
		send(JsonRpcMessage{
			Jsonrpc: "2.0",
			Method:  "textDocument/publishDiagnostics",
			Params: mustMarshal(PublishDiagnosticsParams{
				URI:         "file://" + path,
				Diagnostics: diags,
			}),
		})
	}
}
// collectFiles records in files the path of every fragment reachable from
// node, walking the project tree depth-first.
func collectFiles(node *index.ProjectNode, files map[string]bool) {
	for i := range node.Fragments {
		files[node.Fragments[i].File] = true
	}
	for _, c := range node.Children {
		collectFiles(c, files)
	}
}
// mustMarshal serializes v to JSON and panics if marshalling fails.
// It follows Go's Must* convention: every caller passes a plain data
// struct that is always marshalable, so a failure here is a programmer
// error rather than a runtime condition worth propagating (the previous
// version silently returned a nil message on error).
func mustMarshal(v any) json.RawMessage {
	b, err := json.Marshal(v)
	if err != nil {
		panic(fmt.Sprintf("mustMarshal: %v", err))
	}
	return b
}
func handleHover(params HoverParams) *Hover {
path := uriToPath(params.TextDocument.URI)
line := params.Position.Line + 1
@@ -239,6 +378,93 @@ func handleHover(params HoverParams) *Hover {
}
}
// handleDefinition resolves a "go to definition" request: it maps the cursor
// to a node (or to the target of the reference under the cursor) and returns
// one Location per file fragment where that node is declared as an object.
func handleDefinition(params DefinitionParams) any {
	path := uriToPath(params.TextDocument.URI)
	// LSP positions are zero-based; the index uses one-based coordinates.
	res := tree.Query(path, params.Position.Line+1, params.Position.Character+1)
	if res == nil {
		return nil
	}

	// A resolved reference under the cursor wins; otherwise use the node itself.
	var target *index.ProjectNode
	switch {
	case res.Reference != nil && res.Reference.Target != nil:
		target = res.Reference.Target
	case res.Node != nil:
		target = res.Node
	}
	if target == nil {
		return nil
	}

	var locs []Location
	for _, frag := range target.Fragments {
		if !frag.IsObject {
			continue
		}
		start := Position{Line: frag.ObjectPos.Line - 1, Character: frag.ObjectPos.Column - 1}
		locs = append(locs, Location{
			URI: "file://" + frag.File,
			Range: Range{
				Start: start,
				End:   Position{Line: start.Line, Character: start.Character + len(target.RealName)},
			},
		})
	}
	return locs
}
// handleReferences answers "find all references": every Reference in the
// project that resolves to the node under the cursor, optionally preceded by
// the node's own declaration sites when the client asked for them.
func handleReferences(params ReferenceParams) []Location {
	path := uriToPath(params.TextDocument.URI)
	// Convert zero-based LSP coordinates to the index's one-based ones.
	res := tree.Query(path, params.Position.Line+1, params.Position.Character+1)
	if res == nil {
		return nil
	}

	// Unlike definition lookup, the node itself takes priority here.
	var target *index.ProjectNode
	switch {
	case res.Node != nil:
		target = res.Node
	case res.Reference != nil && res.Reference.Target != nil:
		target = res.Reference.Target
	}
	if target == nil {
		return nil
	}

	var locs []Location
	if params.Context.IncludeDeclaration {
		for _, frag := range target.Fragments {
			if !frag.IsObject {
				continue
			}
			start := Position{Line: frag.ObjectPos.Line - 1, Character: frag.ObjectPos.Column - 1}
			locs = append(locs, Location{
				URI: "file://" + frag.File,
				Range: Range{
					Start: start,
					End:   Position{Line: start.Line, Character: start.Character + len(target.RealName)},
				},
			})
		}
	}
	for _, ref := range tree.References {
		if ref.Target != target {
			continue
		}
		start := Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1}
		locs = append(locs, Location{
			URI: "file://" + ref.File,
			Range: Range{
				Start: start,
				End:   Position{Line: start.Line, Character: start.Character + len(ref.Name)},
			},
		})
	}
	return locs
}
func formatNodeInfo(node *index.ProjectNode) string {
class := node.Metadata["Class"]
if class == "" {
@@ -261,8 +487,8 @@ func formatNodeInfo(node *index.ProjectNode) string {
}
// Size
dims := node.Metadata["NumberOfDimensions"]
elems := node.Metadata["NumberOfElements"]
dims := node.Metadata["NumberOfDimensions"]
elems := node.Metadata["NumberOfElements"]
if dims != "" || elems != "" {
sigInfo += fmt.Sprintf("**Size**: `[%s]`, `%s` dims ", elems, dims)
}
@@ -287,4 +513,4 @@ func respond(id any, result any) {
// send serializes msg and writes it to stdout framed with the LSP
// "Content-Length" header, as required by the base protocol.
// NOTE(review): the Marshal error is dropped; payloads are internal
// structs, but consider logging the error to stderr.
func send(msg any) {
	body, _ := json.Marshal(msg)
	fmt.Printf("Content-Length: %d\r\n\r\n%s", len(body), body)
}
}

View File

@@ -8,8 +8,7 @@ import (
type Parser struct {
lexer *Lexer
tok Token
peeked bool
buf []Token
comments []Comment
pragmas []Pragma
}
@@ -21,21 +20,23 @@ func NewParser(input string) *Parser {
}
func (p *Parser) next() Token {
if p.peeked {
p.peeked = false
return p.tok
if len(p.buf) > 0 {
t := p.buf[0]
p.buf = p.buf[1:]
return t
}
p.tok = p.fetchToken()
return p.tok
return p.fetchToken()
}
func (p *Parser) peek() Token {
if p.peeked {
return p.tok
return p.peekN(0)
}
func (p *Parser) peekN(n int) Token {
for len(p.buf) <= n {
p.buf = append(p.buf, p.fetchToken())
}
p.tok = p.fetchToken()
p.peeked = true
return p.tok
return p.buf[n]
}
func (p *Parser) fetchToken() Token {
@@ -85,11 +86,30 @@ func (p *Parser) parseDefinition() (Definition, error) {
tok := p.next()
switch tok.Type {
case TokenIdentifier:
// field = value
// Could be Field = Value OR Node = { ... }
name := tok.Value
if p.next().Type != TokenEqual {
return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column)
return nil, fmt.Errorf("%d:%d: expected =", tok.Position.Line, tok.Position.Column)
}
// Disambiguate based on RHS
nextTok := p.peek()
if nextTok.Type == TokenLBrace {
// Check if it looks like a Subnode (contains definitions) or Array (contains values)
if p.isSubnodeLookahead() {
sub, err := p.parseSubnode()
if err != nil {
return nil, err
}
return &ObjectNode{
Position: tok.Position,
Name: name,
Subnode: sub,
}, nil
}
}
// Default to Field
val, err := p.parseValue()
if err != nil {
return nil, err
@@ -99,11 +119,12 @@ func (p *Parser) parseDefinition() (Definition, error) {
Name: name,
Value: val,
}, nil
case TokenObjectIdentifier:
// node = subnode
name := tok.Value
if p.next().Type != TokenEqual {
return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column)
return nil, fmt.Errorf("%d:%d: expected =", tok.Position.Line, tok.Position.Column)
}
sub, err := p.parseSubnode()
if err != nil {
@@ -119,6 +140,42 @@ func (p *Parser) parseDefinition() (Definition, error) {
}
}
// isSubnodeLookahead decides, while positioned just before a '{', whether the
// brace opens a subnode (a block of definitions) or an array value. It only
// peeks, so the token stream is left untouched.
func (p *Parser) isSubnodeLookahead() bool {
	// peekN(0) is the '{' itself; peekN(1) is the first token inside it.
	switch first := p.peekN(1); first.Type {
	case TokenRBrace:
		// "{}" — treat an empty block as an empty array value: empty arrays
		// are common, while an empty node would normally be written +Name.
		return false
	case TokenIdentifier:
		// "Name = ..." inside the braces is a definition, hence a subnode;
		// a bare identifier is a reference/value, hence an array.
		return p.peekN(2).Type == TokenEqual
	case TokenObjectIdentifier:
		// "+Node" / "$Node" can only start a definition.
		return true
	default:
		// Literals and anything else start an array of values.
		return false
	}
}
func (p *Parser) parseSubnode() (Subnode, error) {
tok := p.next()
if tok.Type != TokenLBrace {

View File

@@ -59,7 +59,6 @@ func (v *Validator) validateNode(node *index.ProjectNode) {
}
// Check for mandatory Class if it's an object node (+/$)
// Root node usually doesn't have a name or is implicit
if node.RealName != "" && (node.RealName[0] == '+' || node.RealName[0] == '$') {
hasClass := false
hasType := false
@@ -102,12 +101,75 @@ func (v *Validator) validateNode(node *index.ProjectNode) {
}
}
// Legacy/Compatibility method if needed, but we prefer ValidateProject
func (v *Validator) Validate(file string, config *parser.Configuration) {
// No-op or local checks if any
func (v *Validator) CheckUnused() {
referencedNodes := make(map[*index.ProjectNode]bool)
for _, ref := range v.Tree.References {
if ref.Target != nil {
referencedNodes[ref.Target] = true
}
}
v.checkUnusedRecursive(v.Tree.Root, referencedNodes)
}
func (v *Validator) CheckUnused() {
// To implement unused check, we'd need reference tracking in Index
// For now, focusing on duplicate fields and class validation
// checkUnusedRecursive walks the subtree rooted at n and emits warnings for
// GAMs and DataSource signals that no resolved reference points at.
func (v *Validator) checkUnusedRecursive(n *index.ProjectNode, referenced map[*index.ProjectNode]bool) {
	// GAM heuristic: an object node declaring Input/OutputSignals children.
	if isGAM(n) && !referenced[n] {
		v.Diagnostics = append(v.Diagnostics, Diagnostic{
			Level:    LevelWarning,
			Message:  fmt.Sprintf("Unused GAM: %s is defined but not referenced in any thread or scheduler", n.RealName),
			Position: v.getNodePosition(n),
			File:     v.getNodeFile(n),
		})
	}
	// DataSource heuristic: each direct child is a signal that should be used.
	if isDataSource(n) {
		for _, sig := range n.Children {
			if referenced[sig] {
				continue
			}
			v.Diagnostics = append(v.Diagnostics, Diagnostic{
				Level:    LevelWarning,
				Message:  fmt.Sprintf("Unused Signal: %s is defined in DataSource %s but never referenced", sig.RealName, n.RealName),
				Position: v.getNodePosition(sig),
				File:     v.getNodeFile(sig),
			})
		}
	}
	for _, child := range n.Children {
		v.checkUnusedRecursive(child, referenced)
	}
}
// isGAM reports whether node looks like a GAM: an object node ('+' or '$'
// prefix) that declares InputSignals and/or OutputSignals children.
func isGAM(node *index.ProjectNode) bool {
	name := node.RealName
	if name == "" {
		return false
	}
	if c := name[0]; c != '+' && c != '$' {
		return false
	}
	_, in := node.Children["InputSignals"]
	_, out := node.Children["OutputSignals"]
	return in || out
}
// isDataSource reports whether node sits directly under a "Data" section.
func isDataSource(node *index.ProjectNode) bool {
	return node.Parent != nil && node.Parent.Name == "Data"
}
// getNodePosition returns the declaration position of node's first fragment,
// falling back to line 1, column 1 when the node has no recorded fragments.
func (v *Validator) getNodePosition(node *index.ProjectNode) parser.Position {
	if len(node.Fragments) == 0 {
		return parser.Position{Line: 1, Column: 1}
	}
	return node.Fragments[0].ObjectPos
}
// getNodeFile returns the file of node's first fragment, or "" if none.
func (v *Validator) getNodeFile(node *index.ProjectNode) string {
	if len(node.Fragments) == 0 {
		return ""
	}
	return node.Fragments[0].File
}