better multi file support

This commit is contained in:
Martino Ferrari
2026-01-21 10:16:54 +01:00
parent fe4bb7c11e
commit d4d857bf05
7 changed files with 400 additions and 30 deletions

View File

@@ -3,6 +3,7 @@ package index
import ( import (
"fmt" "fmt"
"os" "os"
"path/filepath"
"strings" "strings"
"github.com/marte-dev/marte-dev-tools/internal/parser" "github.com/marte-dev/marte-dev-tools/internal/parser"
@@ -13,6 +14,26 @@ type ProjectTree struct {
References []Reference References []Reference
} }
// ScanDirectory walks rootPath recursively and indexes every file with a
// ".marte" extension into the project tree. A file that cannot be read or
// parsed is logged to stderr and skipped, so one bad file does not abort
// indexing of the whole workspace. The returned error comes only from the
// directory walk itself (e.g. a permission failure entering a directory).
func (pt *ProjectTree) ScanDirectory(rootPath string) error {
	return filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Propagate walk errors so the caller knows the scan is incomplete.
			return err
		}
		if info.IsDir() || !strings.HasSuffix(info.Name(), ".marte") {
			return nil
		}
		content, err := os.ReadFile(path)
		if err != nil {
			// Log and continue: one unreadable file must not stop the scan.
			fmt.Fprintf(os.Stderr, "skipping %s: %v\n", path, err)
			return nil
		}
		p := parser.NewParser(string(content))
		// Files with parse errors are silently skipped here; diagnostics for
		// open files are produced by the validation pass instead.
		config, err := p.Parse()
		if err == nil {
			pt.AddFile(path, config)
		}
		return nil
	})
}
type Reference struct { type Reference struct {
Name string Name string
Position parser.Position Position parser.Position

View File

@@ -10,6 +10,7 @@ import (
"github.com/marte-dev/marte-dev-tools/internal/index" "github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser" "github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/validator"
) )
type JsonRpcMessage struct { type JsonRpcMessage struct {
@@ -26,6 +27,11 @@ type JsonRpcError struct {
Message string `json:"message"` Message string `json:"message"`
} }
// InitializeParams holds the fields of the LSP "initialize" request params
// that this server reads: the workspace root, sent by modern clients as
// rootUri and by older clients as rootPath.
type InitializeParams struct {
// RootURI is the workspace root as a file:// URI (preferred form).
RootURI string `json:"rootUri"`
// RootPath is the legacy plain-path form of the workspace root.
RootPath string `json:"rootPath"`
}
type DidOpenTextDocumentParams struct { type DidOpenTextDocumentParams struct {
TextDocument TextDocumentItem `json:"textDocument"` TextDocument TextDocumentItem `json:"textDocument"`
} }
@@ -60,6 +66,31 @@ type TextDocumentIdentifier struct {
URI string `json:"uri"` URI string `json:"uri"`
} }
// DefinitionParams is the payload of a textDocument/definition request.
type DefinitionParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"`
Position Position `json:"position"`
}
// ReferenceParams is the payload of a textDocument/references request.
type ReferenceParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"`
Position Position `json:"position"`
Context ReferenceContext `json:"context"`
}
// ReferenceContext carries the client's options for a references request.
type ReferenceContext struct {
// IncludeDeclaration asks the server to also return declaration sites.
IncludeDeclaration bool `json:"includeDeclaration"`
}
// Location identifies a text span inside a document, per the LSP spec.
type Location struct {
URI string `json:"uri"`
Range Range `json:"range"`
}
// Range is a half-open [start, end) span of zero-based positions.
type Range struct {
Start Position `json:"start"`
End Position `json:"end"`
}
type Position struct { type Position struct {
Line int `json:"line"` Line int `json:"line"`
Character int `json:"character"` Character int `json:"character"`
@@ -74,6 +105,18 @@ type MarkupContent struct {
Value string `json:"value"` Value string `json:"value"`
} }
// PublishDiagnosticsParams is the payload of the
// textDocument/publishDiagnostics server-to-client notification.
type PublishDiagnosticsParams struct {
URI string `json:"uri"`
// Diagnostics replaces the file's previous set; an empty slice clears it.
Diagnostics []LSPDiagnostic `json:"diagnostics"`
}
// LSPDiagnostic is a single diagnostic entry as defined by the LSP spec.
type LSPDiagnostic struct {
Range Range `json:"range"`
// Severity uses LSP DiagnosticSeverity values: 1 = Error, 2 = Warning.
Severity int `json:"severity"`
Message string `json:"message"`
Source string `json:"source"`
}
var tree = index.NewProjectTree() var tree = index.NewProjectTree()
func RunServer() { func RunServer() {
@@ -121,6 +164,22 @@ func readMessage(reader *bufio.Reader) (*JsonRpcMessage, error) {
func handleMessage(msg *JsonRpcMessage) { func handleMessage(msg *JsonRpcMessage) {
switch msg.Method { switch msg.Method {
case "initialize": case "initialize":
var params InitializeParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
root := ""
if params.RootURI != "" {
root = uriToPath(params.RootURI)
} else if params.RootPath != "" {
root = params.RootPath
}
if root != "" {
fmt.Fprintf(os.Stderr, "Scanning workspace: %s\n", root)
tree.ScanDirectory(root)
tree.ResolveReferences()
}
}
respond(msg.ID, map[string]any{ respond(msg.ID, map[string]any{
"capabilities": map[string]any{ "capabilities": map[string]any{
"textDocumentSync": 1, // Full sync "textDocumentSync": 1, // Full sync
@@ -130,7 +189,7 @@ func handleMessage(msg *JsonRpcMessage) {
}, },
}) })
case "initialized": case "initialized":
// Do nothing runValidation("")
case "shutdown": case "shutdown":
respond(msg.ID, nil) respond(msg.ID, nil)
case "exit": case "exit":
@@ -160,6 +219,16 @@ func handleMessage(msg *JsonRpcMessage) {
fmt.Fprint(os.Stderr, "not recovered hover parameters\n") fmt.Fprint(os.Stderr, "not recovered hover parameters\n")
respond(msg.ID, nil) respond(msg.ID, nil)
} }
case "textDocument/definition":
var params DefinitionParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleDefinition(params))
}
case "textDocument/references":
var params ReferenceParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, handleReferences(params))
}
} }
} }
@@ -174,6 +243,7 @@ func handleDidOpen(params DidOpenTextDocumentParams) {
if err == nil { if err == nil {
tree.AddFile(path, config) tree.AddFile(path, config)
tree.ResolveReferences() tree.ResolveReferences()
runValidation(params.TextDocument.URI)
} }
} }
@@ -188,9 +258,78 @@ func handleDidChange(params DidChangeTextDocumentParams) {
if err == nil { if err == nil {
tree.AddFile(path, config) tree.AddFile(path, config)
tree.ResolveReferences() tree.ResolveReferences()
runValidation(params.TextDocument.URI)
} }
} }
// runValidation re-runs project-wide validation and publishes diagnostics
// for every file known to the index. The uri argument is currently unused:
// diagnostics are always recomputed for the whole project so that files
// whose problems were fixed indirectly get an explicit empty push.
func runValidation(uri string) {
	v := validator.NewValidator(tree)
	v.ValidateProject()
	v.CheckUnused()

	// Seed every known file with an empty list so fixed files are cleared.
	known := make(map[string]bool)
	collectFiles(tree.Root, known)
	perFile := make(map[string][]LSPDiagnostic, len(known))
	for f := range known {
		perFile[f] = []LSPDiagnostic{}
	}

	for _, d := range v.Diagnostics {
		if d.File == "" {
			// A diagnostic without a file cannot be published anywhere.
			continue
		}
		sev := 1 // LSP: 1 = Error
		if d.Level == validator.LevelWarning {
			sev = 2 // LSP: 2 = Warning
		}
		start := Position{Line: d.Position.Line - 1, Character: d.Position.Column - 1}
		perFile[d.File] = append(perFile[d.File], LSPDiagnostic{
			Range: Range{
				Start: start,
				// Arbitrary 10-character span: the validator does not
				// record the length of the offending token.
				End: Position{Line: start.Line, Character: start.Character + 10},
			},
			Severity: sev,
			Message:  d.Message,
			Source:   "mdt",
		})
	}

	// Push one publishDiagnostics notification per file.
	for path, diags := range perFile {
		send(JsonRpcMessage{
			Jsonrpc: "2.0",
			Method:  "textDocument/publishDiagnostics",
			Params: mustMarshal(PublishDiagnosticsParams{
				URI:         "file://" + path,
				Diagnostics: diags,
			}),
		})
	}
}
// collectFiles adds to the files set every file that contributes a fragment
// to node or to any of its descendants.
func collectFiles(node *index.ProjectNode, files map[string]bool) {
	for i := range node.Fragments {
		files[node.Fragments[i].File] = true
	}
	for _, c := range node.Children {
		collectFiles(c, files)
	}
}
// mustMarshal serializes v to JSON, panicking on failure. It is intended
// only for server-constructed values (notification params) that are known
// to be marshalable; an error here is a programmer bug, so per the Go
// "Must" naming convention we fail loudly instead of silently emitting a
// broken JSON-RPC message as the previous version did.
func mustMarshal(v any) json.RawMessage {
	b, err := json.Marshal(v)
	if err != nil {
		panic(fmt.Sprintf("mustMarshal: %v", err))
	}
	return b
}
func handleHover(params HoverParams) *Hover { func handleHover(params HoverParams) *Hover {
path := uriToPath(params.TextDocument.URI) path := uriToPath(params.TextDocument.URI)
line := params.Position.Line + 1 line := params.Position.Line + 1
@@ -239,6 +378,93 @@ func handleHover(params HoverParams) *Hover {
} }
} }
// handleDefinition resolves a go-to-definition request: it maps the cursor
// position to an indexed symbol and returns the location(s) of the target
// node's object declarations across the project.
func handleDefinition(params DefinitionParams) any {
	path := uriToPath(params.TextDocument.URI)
	// LSP positions are zero-based; the index is one-based.
	res := tree.Query(path, params.Position.Line+1, params.Position.Character+1)
	if res == nil {
		return nil
	}

	// Prefer jumping to a reference's resolved target; otherwise fall back
	// to the node under the cursor itself.
	var target *index.ProjectNode
	switch {
	case res.Reference != nil && res.Reference.Target != nil:
		target = res.Reference.Target
	case res.Node != nil:
		target = res.Node
	default:
		return nil
	}

	var locs []Location
	for _, frag := range target.Fragments {
		if !frag.IsObject {
			continue
		}
		start := Position{Line: frag.ObjectPos.Line - 1, Character: frag.ObjectPos.Column - 1}
		locs = append(locs, Location{
			URI: "file://" + frag.File,
			Range: Range{
				Start: start,
				End:   Position{Line: start.Line, Character: start.Character + len(target.RealName)},
			},
		})
	}
	return locs
}
// handleReferences collects every location in the project that refers to
// the symbol under the cursor, optionally including its declaration sites
// when the client sets includeDeclaration.
func handleReferences(params ReferenceParams) []Location {
	path := uriToPath(params.TextDocument.URI)
	// LSP positions are zero-based; the index is one-based.
	res := tree.Query(path, params.Position.Line+1, params.Position.Character+1)
	if res == nil {
		return nil
	}

	// Unlike definition lookup, the node under the cursor takes precedence
	// over a resolved reference target here.
	var target *index.ProjectNode
	if res.Node != nil {
		target = res.Node
	} else if res.Reference != nil && res.Reference.Target != nil {
		target = res.Reference.Target
	}
	if target == nil {
		return nil
	}

	// span converts a one-based index position into a zero-based LSP range
	// of the given length.
	span := func(line, col, length int) Range {
		start := Position{Line: line - 1, Character: col - 1}
		return Range{
			Start: start,
			End:   Position{Line: start.Line, Character: start.Character + length},
		}
	}

	var locs []Location
	if params.Context.IncludeDeclaration {
		for _, frag := range target.Fragments {
			if frag.IsObject {
				locs = append(locs, Location{
					URI:   "file://" + frag.File,
					Range: span(frag.ObjectPos.Line, frag.ObjectPos.Column, len(target.RealName)),
				})
			}
		}
	}
	for _, ref := range tree.References {
		if ref.Target == target {
			locs = append(locs, Location{
				URI:   "file://" + ref.File,
				Range: span(ref.Position.Line, ref.Position.Column, len(ref.Name)),
			})
		}
	}
	return locs
}
func formatNodeInfo(node *index.ProjectNode) string { func formatNodeInfo(node *index.ProjectNode) string {
class := node.Metadata["Class"] class := node.Metadata["Class"]
if class == "" { if class == "" {
@@ -262,7 +488,7 @@ func formatNodeInfo(node *index.ProjectNode) string {
// Size // Size
dims := node.Metadata["NumberOfDimensions"] dims := node.Metadata["NumberOfDimensions"]
elems := node.Metadata["NumberOfElements"] elems := node.Metadata["NumberOfElements"]
if dims != "" || elems != "" { if dims != "" || elems != "" {
sigInfo += fmt.Sprintf("**Size**: `[%s]`, `%s` dims ", elems, dims) sigInfo += fmt.Sprintf("**Size**: `[%s]`, `%s` dims ", elems, dims)
} }

View File

@@ -8,8 +8,7 @@ import (
type Parser struct { type Parser struct {
lexer *Lexer lexer *Lexer
tok Token buf []Token
peeked bool
comments []Comment comments []Comment
pragmas []Pragma pragmas []Pragma
} }
@@ -21,21 +20,23 @@ func NewParser(input string) *Parser {
} }
func (p *Parser) next() Token { func (p *Parser) next() Token {
if p.peeked { if len(p.buf) > 0 {
p.peeked = false t := p.buf[0]
return p.tok p.buf = p.buf[1:]
return t
} }
p.tok = p.fetchToken() return p.fetchToken()
return p.tok
} }
func (p *Parser) peek() Token { func (p *Parser) peek() Token {
if p.peeked { return p.peekN(0)
return p.tok }
func (p *Parser) peekN(n int) Token {
for len(p.buf) <= n {
p.buf = append(p.buf, p.fetchToken())
} }
p.tok = p.fetchToken() return p.buf[n]
p.peeked = true
return p.tok
} }
func (p *Parser) fetchToken() Token { func (p *Parser) fetchToken() Token {
@@ -85,11 +86,30 @@ func (p *Parser) parseDefinition() (Definition, error) {
tok := p.next() tok := p.next()
switch tok.Type { switch tok.Type {
case TokenIdentifier: case TokenIdentifier:
// field = value // Could be Field = Value OR Node = { ... }
name := tok.Value name := tok.Value
if p.next().Type != TokenEqual { if p.next().Type != TokenEqual {
return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column) return nil, fmt.Errorf("%d:%d: expected =", tok.Position.Line, tok.Position.Column)
} }
// Disambiguate based on RHS
nextTok := p.peek()
if nextTok.Type == TokenLBrace {
// Check if it looks like a Subnode (contains definitions) or Array (contains values)
if p.isSubnodeLookahead() {
sub, err := p.parseSubnode()
if err != nil {
return nil, err
}
return &ObjectNode{
Position: tok.Position,
Name: name,
Subnode: sub,
}, nil
}
}
// Default to Field
val, err := p.parseValue() val, err := p.parseValue()
if err != nil { if err != nil {
return nil, err return nil, err
@@ -99,11 +119,12 @@ func (p *Parser) parseDefinition() (Definition, error) {
Name: name, Name: name,
Value: val, Value: val,
}, nil }, nil
case TokenObjectIdentifier: case TokenObjectIdentifier:
// node = subnode // node = subnode
name := tok.Value name := tok.Value
if p.next().Type != TokenEqual { if p.next().Type != TokenEqual {
return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column) return nil, fmt.Errorf("%d:%d: expected =", tok.Position.Line, tok.Position.Column)
} }
sub, err := p.parseSubnode() sub, err := p.parseSubnode()
if err != nil { if err != nil {
@@ -119,6 +140,42 @@ func (p *Parser) parseDefinition() (Definition, error) {
} }
} }
// isSubnodeLookahead decides, with the parser positioned just before '{',
// whether the braced body should be parsed as a subnode (definitions) or
// as an array value. It only peeks; no tokens are consumed.
func (p *Parser) isSubnodeLookahead() bool {
	// peekN(0) is the '{' itself; inspect the first token inside it.
	switch first := p.peekN(1); first.Type {
	case TokenRBrace:
		// "{}" is ambiguous. An empty array value is the more common
		// intent, so treat it as a value; a real node should carry
		// content (or use the +Name form).
		return false
	case TokenIdentifier:
		// "Name = ..." inside the braces is a definition, hence a
		// subnode; a bare identifier is a reference/value, hence array.
		return p.peekN(2).Type == TokenEqual
	case TokenObjectIdentifier:
		// "+Node = ..." can only start a definition, hence subnode.
		return true
	default:
		// Any literal means an array of values.
		return false
	}
}
func (p *Parser) parseSubnode() (Subnode, error) { func (p *Parser) parseSubnode() (Subnode, error) {
tok := p.next() tok := p.next()
if tok.Type != TokenLBrace { if tok.Type != TokenLBrace {

View File

@@ -59,7 +59,6 @@ func (v *Validator) validateNode(node *index.ProjectNode) {
} }
// Check for mandatory Class if it's an object node (+/$) // Check for mandatory Class if it's an object node (+/$)
// Root node usually doesn't have a name or is implicit
if node.RealName != "" && (node.RealName[0] == '+' || node.RealName[0] == '$') { if node.RealName != "" && (node.RealName[0] == '+' || node.RealName[0] == '$') {
hasClass := false hasClass := false
hasType := false hasType := false
@@ -102,12 +101,75 @@ func (v *Validator) validateNode(node *index.ProjectNode) {
} }
} }
// Legacy/Compatibility method if needed, but we prefer ValidateProject func (v *Validator) CheckUnused() {
func (v *Validator) Validate(file string, config *parser.Configuration) { referencedNodes := make(map[*index.ProjectNode]bool)
// No-op or local checks if any for _, ref := range v.Tree.References {
if ref.Target != nil {
referencedNodes[ref.Target] = true
}
}
v.checkUnusedRecursive(v.Tree.Root, referencedNodes)
} }
func (v *Validator) CheckUnused() { func (v *Validator) checkUnusedRecursive(node *index.ProjectNode, referenced map[*index.ProjectNode]bool) {
// To implement unused check, we'd need reference tracking in Index // Heuristic for GAM
// For now, focusing on duplicate fields and class validation if isGAM(node) {
if !referenced[node] {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelWarning,
Message: fmt.Sprintf("Unused GAM: %s is defined but not referenced in any thread or scheduler", node.RealName),
Position: v.getNodePosition(node),
File: v.getNodeFile(node),
})
}
}
// Heuristic for DataSource and its signals
if isDataSource(node) {
for _, signal := range node.Children {
if !referenced[signal] {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelWarning,
Message: fmt.Sprintf("Unused Signal: %s is defined in DataSource %s but never referenced", signal.RealName, node.RealName),
Position: v.getNodePosition(signal),
File: v.getNodeFile(signal),
})
}
}
}
for _, child := range node.Children {
v.checkUnusedRecursive(child, referenced)
}
}
// isGAM reports whether node looks like a GAM: an object node ('+' or '$'
// prefix) that declares InputSignals or OutputSignals children.
func isGAM(node *index.ProjectNode) bool {
	name := node.RealName
	if name == "" {
		return false
	}
	if name[0] != '+' && name[0] != '$' {
		return false
	}
	_, in := node.Children["InputSignals"]
	_, out := node.Children["OutputSignals"]
	return in || out
}
// isDataSource reports whether node sits directly under a parent named
// "Data", the section where data sources are declared.
func isDataSource(node *index.ProjectNode) bool {
	return node.Parent != nil && node.Parent.Name == "Data"
}
// getNodePosition returns the declaration position of node's first
// fragment, falling back to the start of the file when none is recorded.
func (v *Validator) getNodePosition(node *index.ProjectNode) parser.Position {
	if len(node.Fragments) == 0 {
		return parser.Position{Line: 1, Column: 1}
	}
	return node.Fragments[0].ObjectPos
}
func (v *Validator) getNodeFile(node *index.ProjectNode) string {
if len(node.Fragments) > 0 {
return node.Fragments[0].File
}
return ""
} }

BIN
mdt

Binary file not shown.

View File

@@ -40,7 +40,8 @@ The LSP server should provide the following capabilities:
- **URI Symbols**: The symbols `+` and `$` used for object nodes are **not** written in the URI of the `#package` macro (e.g., use `PROJECT.NODE` even if the node is defined as `+NODE`). - **URI Symbols**: The symbols `+` and `$` used for object nodes are **not** written in the URI of the `#package` macro (e.g., use `PROJECT.NODE` even if the node is defined as `+NODE`).
- **Build Process**: - **Build Process**:
- The build tool merges all files sharing the same base namespace. - The build tool merges all files sharing the same base namespace.
- **Multi-File Nodes**: Nodes can be defined across multiple files. The build tool and validator must merge these definitions before processing. - **Multi-File Definitions**: Nodes and objects can be defined across multiple files. The build tool, validator, and LSP must merge these definitions (including all fields and sub-nodes) from the entire project to create a unified view before processing or validating.
- **Global References**: References to nodes, signals, or objects can point to definitions located in any file within the project.
- **Merging Order**: For objects defined across multiple files, the **first file** to be considered is the one containing the `Class` field definition. - **Merging Order**: For objects defined across multiple files, the **first file** to be considered is the one containing the `Class` field definition.
- **Field Order**: Within a single file, the relative order of defined fields must be maintained. - **Field Order**: Within a single file, the relative order of defined fields must be maintained.
- The LSP indexes only files belonging to the same project/namespace scope. - The LSP indexes only files belonging to the same project/namespace scope.
@@ -75,7 +76,7 @@ The LSP server should provide the following capabilities:
### Semantics ### Semantics
- **Nodes (`+` / `$`)**: The prefixes `+` and `$` indicate that the node represents an object. - **Nodes (`+` / `$`)**: The prefixes `+` and `$` indicate that the node represents an object.
- **Constraint**: These nodes _must_ contain a field named `Class` within their subnode definition. - **Constraint**: These nodes _must_ contain a field named `Class` within their subnode definition (across all files where the node is defined).
- **Signals**: Signals are considered nodes but **not** objects. They do not require a `Class` field. - **Signals**: Signals are considered nodes but **not** objects. They do not require a `Class` field.
- **Pragmas (`//!`)**: Used to suppress specific diagnostics. The developer can use these to explain why a rule is being ignored. - **Pragmas (`//!`)**: Used to suppress specific diagnostics. The developer can use these to explain why a rule is being ignored.
- **Structure**: A configuration is composed by one or more definitions. - **Structure**: A configuration is composed by one or more definitions.
@@ -134,6 +135,9 @@ The tool must build an index of the configuration to support LSP features and va
### Validation Rules ### Validation Rules
- **Consistency**: The `lsp`, `check`, and `build` commands **must share the same validation engine** to ensure consistent results across all tools. - **Consistency**: The `lsp`, `check`, and `build` commands **must share the same validation engine** to ensure consistent results across all tools.
- **Global Validation Context**:
- All validation steps must operate on the aggregated view of the project.
- A node's validity is determined by the combination of all its fields and sub-nodes defined across all project files.
- **Class Validation**: - **Class Validation**:
- For each known `Class`, the validator checks: - For each known `Class`, the validator checks:
- **Mandatory Fields**: Verification that all required fields are present. - **Mandatory Fields**: Verification that all required fields are present.
@@ -144,7 +148,7 @@ The tool must build an index of the configuration to support LSP features and va
- Class validation rules must be defined in a separate schema file. - Class validation rules must be defined in a separate schema file.
- **Project-Specific Classes**: Developers can define their own project-specific classes and corresponding validation rules, expanding the validation capabilities for their specific needs. - **Project-Specific Classes**: Developers can define their own project-specific classes and corresponding validation rules, expanding the validation capabilities for their specific needs.
- **Duplicate Fields**: - **Duplicate Fields**:
- **Constraint**: A field must not be defined more than once within the same object/node scope. - **Constraint**: A field must not be defined more than once within the same object/node scope, even if those definitions are spread across different files.
- **Multi-File Consideration**: Validation must account for nodes being defined across multiple files (merged) when checking for duplicates. - **Multi-File Consideration**: Validation must account for nodes being defined across multiple files (merged) when checking for duplicates.
### Formatting Rules ### Formatting Rules

View File

@@ -1,7 +1,7 @@
#package TEST.FMT #package TEST.FMT
// Detached comment // Detached comment
//# Test
+Node = { +Node = {
Class = "MyClass" Class = "MyClass"