diff --git a/cmd/mdt/main.go b/cmd/mdt/main.go new file mode 100644 index 0000000..4d1b2d5 --- /dev/null +++ b/cmd/mdt/main.go @@ -0,0 +1,113 @@ +package main + +import ( + "fmt" + "io/ioutil" + "os" + + "github.com/marte-dev/marte-dev-tools/internal/builder" + "github.com/marte-dev/marte-dev-tools/internal/index" + "github.com/marte-dev/marte-dev-tools/internal/lsp" + "github.com/marte-dev/marte-dev-tools/internal/parser" + "github.com/marte-dev/marte-dev-tools/internal/validator" +) + +func main() { + if len(os.Args) < 2 { + fmt.Println("Usage: mdt <command> [arguments]") + fmt.Println("Commands: lsp, build, check, fmt") + os.Exit(1) + } + + command := os.Args[1] + switch command { + case "lsp": + runLSP() + case "build": + runBuild(os.Args[2:]) + case "check": + runCheck(os.Args[2:]) + case "fmt": + runFmt() + default: + fmt.Printf("Unknown command: %s\n", command) + os.Exit(1) + } +} + +func runLSP() { + lsp.RunServer() +} + +func runBuild(args []string) { + if len(args) < 1 { + fmt.Println("Usage: mdt build <files...>") + os.Exit(1) + } + + outputDir := "build" + os.MkdirAll(outputDir, 0755) + + b := builder.NewBuilder(args) + err := b.Build(outputDir) + if err != nil { + fmt.Printf("Build failed: %v\n", err) + os.Exit(1) + } + fmt.Println("Build successful. 
Output in", outputDir) +} + +func runCheck(args []string) { + if len(args) < 1 { + fmt.Println("Usage: mdt check <files...>") + os.Exit(1) + } + + idx := index.NewIndex() + configs := make(map[string]*parser.Configuration) + + for _, file := range args { + content, err := ioutil.ReadFile(file) + if err != nil { + fmt.Printf("Error reading %s: %v\n", file, err) + continue + } + + p := parser.NewParser(string(content)) + config, err := p.Parse() + if err != nil { + fmt.Printf("%s: Grammar error: %v\n", file, err) + continue + } + + configs[file] = config + idx.IndexConfig(file, config) + } + + idx.ResolveReferences() + v := validator.NewValidator(idx) + + for file, config := range configs { + v.Validate(file, config) + } + v.CheckUnused() + + for _, diag := range v.Diagnostics { + level := "ERROR" + if diag.Level == validator.LevelWarning { + level = "WARNING" + } + fmt.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message) + } + + if len(v.Diagnostics) > 0 { + fmt.Printf("\nFound %d issues.\n", len(v.Diagnostics)) + } else { + fmt.Println("No issues found.") + } +} + +func runFmt() { + fmt.Println("Formatting files...") + // TODO: Implement fmt +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..c6f6ec6 --- /dev/null +++ b/go.mod @@ -0,0 +1,3 @@ +module github.com/marte-dev/marte-dev-tools + +go 1.25.6 diff --git a/internal/builder/builder.go b/internal/builder/builder.go new file mode 100644 index 0000000..f3c8095 --- /dev/null +++ b/internal/builder/builder.go @@ -0,0 +1,111 @@ +package builder + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/marte-dev/marte-dev-tools/internal/parser" +) + +type Builder struct { + Files []string +} + +func NewBuilder(files []string) *Builder { + return &Builder{Files: files} +} + +func (b *Builder) Build(outputDir string) error { + packages := make(map[string]*parser.Configuration) + + for _, file := range b.Files { + content, err := 
ioutil.ReadFile(file) + if err != nil { + return err + } + + p := parser.NewParser(string(content)) + config, err := p.Parse() + if err != nil { + return fmt.Errorf("error parsing %s: %v", file, err) + } + + pkgURI := "" + if config.Package != nil { + pkgURI = config.Package.URI + } + + if existing, ok := packages[pkgURI]; ok { + existing.Definitions = append(existing.Definitions, config.Definitions...) + } else { + packages[pkgURI] = config + } + } + + for pkg, config := range packages { + if pkg == "" { + continue // Or handle global package + } + + outputPath := filepath.Join(outputDir, pkg+".marte") + err := b.writeConfig(outputPath, config) + if err != nil { + return err + } + } + + return nil +} + +func (b *Builder) writeConfig(path string, config *parser.Configuration) error { + f, err := os.Create(path) + if err != nil { + return err + } + defer f.Close() + + for _, def := range config.Definitions { + b.writeDefinition(f, def, 0) + } + return nil +} + +func (b *Builder) writeDefinition(f *os.File, def parser.Definition, indent int) { + indentStr := strings.Repeat(" ", indent) + switch d := def.(type) { + case *parser.Field: + fmt.Fprintf(f, "%s%s = %s\n", indentStr, d.Name, b.formatValue(d.Value)) + case *parser.ObjectNode: + fmt.Fprintf(f, "%s%s = {\n", indentStr, d.Name) + for _, subDef := range d.Subnode.Definitions { + b.writeDefinition(f, subDef, indent+1) + } + fmt.Fprintf(f, "%s}\n", indentStr) + } +} + +func (b *Builder) formatValue(val parser.Value) string { + switch v := val.(type) { + case *parser.StringValue: + return fmt.Sprintf("\"%s\"", v.Value) + case *parser.IntValue: + return v.Raw + case *parser.FloatValue: + return v.Raw + case *parser.BoolValue: + return fmt.Sprintf("%v", v.Value) + case *parser.ReferenceValue: + return v.Value + case *parser.ArrayValue: + elements := []string{} + for _, e := range v.Elements { + elements = append(elements, b.formatValue(e)) + } + return fmt.Sprintf("{%s}", strings.Join(elements, " ")) + default: + 
return "" + } +} diff --git a/internal/index/index.go b/internal/index/index.go new file mode 100644 index 0000000..7224d9e --- /dev/null +++ b/internal/index/index.go @@ -0,0 +1,125 @@ +package index + +import ( + "github.com/marte-dev/marte-dev-tools/internal/parser" +) + +type SymbolType int + +const ( + SymbolObject SymbolType = iota + SymbolSignal + SymbolDataSource + SymbolGAM +) + +type Symbol struct { + Name string + Type SymbolType + Position parser.Position + File string + Doc string + Class string + Parent *Symbol +} + +type Reference struct { + Name string + Position parser.Position + File string + Target *Symbol +} + +type Index struct { + Symbols map[string]*Symbol + References []Reference + Packages map[string][]string // pkgURI -> list of files +} + +func NewIndex() *Index { + return &Index{ + Symbols: make(map[string]*Symbol), + Packages: make(map[string][]string), + } +} + +func (idx *Index) IndexConfig(file string, config *parser.Configuration) { + pkgURI := "" + if config.Package != nil { + pkgURI = config.Package.URI + } + idx.Packages[pkgURI] = append(idx.Packages[pkgURI], file) + + for _, def := range config.Definitions { + idx.indexDefinition(file, "", nil, def) + } +} + +func (idx *Index) indexDefinition(file string, path string, parent *Symbol, def parser.Definition) { + switch d := def.(type) { + case *parser.ObjectNode: + name := d.Name + fullPath := name + if path != "" { + fullPath = path + "." 
+ name + } + + class := "" + for _, subDef := range d.Subnode.Definitions { + if f, ok := subDef.(*parser.Field); ok && f.Name == "Class" { + if s, ok := f.Value.(*parser.StringValue); ok { + class = s.Value + } else if r, ok := f.Value.(*parser.ReferenceValue); ok { + class = r.Value + } + } + } + + symType := SymbolObject + // Simple heuristic for GAM or DataSource if class name matches or node name starts with +/$ + // In a real implementation we would check the class against known MARTe classes + + sym := &Symbol{ + Name: fullPath, + Type: symType, + Position: d.Position, + File: file, + Class: class, + Parent: parent, + } + idx.Symbols[fullPath] = sym + + for _, subDef := range d.Subnode.Definitions { + idx.indexDefinition(file, fullPath, sym, subDef) + } + + case *parser.Field: + idx.indexValue(file, d.Value) + } +} + +func (idx *Index) indexValue(file string, val parser.Value) { + switch v := val.(type) { + case *parser.ReferenceValue: + idx.References = append(idx.References, Reference{ + Name: v.Value, + Position: v.Position, + File: file, + }) + case *parser.ArrayValue: + for _, elem := range v.Elements { + idx.indexValue(file, elem) + } + } +} + +func (idx *Index) ResolveReferences() { + for i := range idx.References { + ref := &idx.References[i] + if sym, ok := idx.Symbols[ref.Name]; ok { + ref.Target = sym + } else { + // Try relative resolution? 
+ } + } +} \ No newline at end of file diff --git a/internal/lsp/server.go b/internal/lsp/server.go new file mode 100644 index 0000000..ec11d48 --- /dev/null +++ b/internal/lsp/server.go @@ -0,0 +1,109 @@ +package lsp + +import ( + "bufio" + "encoding/json" + "fmt" + "io" + "os" +) + +type JsonRpcMessage struct { + Jsonrpc string `json:"jsonrpc"` + Method string `json:"method,omitempty"` + Params json.RawMessage `json:"params,omitempty"` + ID interface{} `json:"id,omitempty"` + Result interface{} `json:"result,omitempty"` + Error *JsonRpcError `json:"error,omitempty"` +} + +type JsonRpcError struct { + Code int `json:"code"` + Message string `json:"message"` +} + +func RunServer() { + reader := bufio.NewReader(os.Stdin) + for { + msg, err := readMessage(reader) + if err != nil { + if err == io.EOF { + break + } + fmt.Fprintf(os.Stderr, "Error reading message: %v\n", err) + continue + } + + handleMessage(msg) + } +} + +func readMessage(reader *bufio.Reader) (*JsonRpcMessage, error) { + // LSP uses Content-Length header + var contentLength int + for { + line, err := reader.ReadString('\n') + if err != nil { + return nil, err + } + if line == "\r\n" { + break + } + if _, err := fmt.Sscanf(line, "Content-Length: %d", &contentLength); err == nil { + continue + } + } + + body := make([]byte, contentLength) + _, err := io.ReadFull(reader, body) + if err != nil { + return nil, err + } + + var msg JsonRpcMessage + err = json.Unmarshal(body, &msg) + return &msg, err +} + +func handleMessage(msg *JsonRpcMessage) { + switch msg.Method { + case "initialize": + respond(msg.ID, map[string]interface{}{ + "capabilities": map[string]interface{}{ + "textDocumentSync": 1, // Full sync + "hoverProvider": true, + "definitionProvider": true, + "referencesProvider": true, + "completionProvider": map[string]interface{}{ + "triggerCharacters": []string{"=", ".", "{", "+", "$"}, + }, + }, + }) + case "initialized": + // Do nothing + case "shutdown": + respond(msg.ID, nil) + case "exit": + 
os.Exit(0) + case "textDocument/didOpen": + // Handle file open + case "textDocument/didChange": + // Handle file change + case "textDocument/hover": + // Handle hover + } +} + +func respond(id interface{}, result interface{}) { + msg := JsonRpcMessage{ + Jsonrpc: "2.0", + ID: id, + Result: result, + } + send(msg) +} + +func send(msg interface{}) { + body, _ := json.Marshal(msg) + fmt.Printf("Content-Length: %d\r\n\r\n%s", len(body), body) +} diff --git a/internal/parser/ast.go b/internal/parser/ast.go new file mode 100644 index 0000000..ac0e766 --- /dev/null +++ b/internal/parser/ast.go @@ -0,0 +1,116 @@ +package parser + +type Node interface { + Pos() Position +} + +type Position struct { + Line int + Column int +} + +type Configuration struct { + Definitions []Definition + Package *Package +} + +type Definition interface { + Node + isDefinition() +} + +type Field struct { + Position Position + Name string + Value Value +} + +func (f *Field) Pos() Position { return f.Position } +func (f *Field) isDefinition() {} + +type ObjectNode struct { + Position Position + Name string // includes + or $ + Subnode Subnode +} + +func (o *ObjectNode) Pos() Position { return o.Position } +func (o *ObjectNode) isDefinition() {} + +type Subnode struct { + Position Position + Definitions []Definition +} + +type Value interface { + Node + isValue() +} + +type StringValue struct { + Position Position + Value string +} + +func (v *StringValue) Pos() Position { return v.Position } +func (v *StringValue) isValue() {} + +type IntValue struct { + Position Position + Value int64 + Raw string +} + +func (v *IntValue) Pos() Position { return v.Position } +func (v *IntValue) isValue() {} + +type FloatValue struct { + Position Position + Value float64 + Raw string +} + +func (v *FloatValue) Pos() Position { return v.Position } +func (v *FloatValue) isValue() {} + +type BoolValue struct { + Position Position + Value bool +} + +func (v *BoolValue) Pos() Position { return v.Position } +func (v 
*BoolValue) isValue() {} + +type ReferenceValue struct { + Position Position + Value string +} + +func (v *ReferenceValue) Pos() Position { return v.Position } +func (v *ReferenceValue) isValue() {} + +type ArrayValue struct { + Position Position + Elements []Value +} + +func (v *ArrayValue) Pos() Position { return v.Position } +func (v *ArrayValue) isValue() {} + +type Package struct { + Position Position + URI string +} + +func (p *Package) Pos() Position { return p.Position } + +type Comment struct { + Position Position + Text string + Doc bool // true if starts with //# +} + +type Pragma struct { + Position Position + Text string +} diff --git a/internal/parser/lexer.go b/internal/parser/lexer.go new file mode 100644 index 0000000..300a1ee --- /dev/null +++ b/internal/parser/lexer.go @@ -0,0 +1,233 @@ +package parser + +import ( + "unicode" + "unicode/utf8" +) + +type TokenType int + +const ( + TokenError TokenType = iota + TokenEOF + TokenIdentifier + TokenObjectIdentifier // +$ + TokenEqual + TokenLBrace + TokenRBrace + TokenString + TokenNumber + TokenBool + TokenPackage + TokenPragma + TokenComment + TokenDocstring +) + +type Token struct { + Type TokenType + Value string + Position Position +} + +type Lexer struct { + input string + start int + pos int + width int + line int + lineStart int +} + +func NewLexer(input string) *Lexer { + return &Lexer{ + input: input, + line: 1, + } +} + +func (l *Lexer) next() rune { + if l.pos >= len(l.input) { + l.width = 0 + return -1 + } + r, w := utf8.DecodeRuneInString(l.input[l.pos:]) + l.width = w + l.pos += l.width + if r == '\n' { + l.line++ + l.lineStart = l.pos + } + return r +} + +func (l *Lexer) backup() { + l.pos -= l.width + if l.width > 0 { + r, _ := utf8.DecodeRuneInString(l.input[l.pos:]) + if r == '\n' { + l.line-- + // This is tricky, we'd need to find the previous line start + // For simplicity, let's just not backup over newlines or handle it better + } + } +} + +func (l *Lexer) peek() rune { + r := 
l.next() + l.backup() + return r +} + +func (l *Lexer) emit(t TokenType) Token { + tok := Token{ + Type: t, + Value: l.input[l.start:l.pos], + Position: Position{ + Line: l.line, + Column: l.start - l.lineStart + 1, + }, + } + l.start = l.pos + return tok +} + +func (l *Lexer) NextToken() Token { + for { + r := l.next() + if r == -1 { + return l.emit(TokenEOF) + } + + if unicode.IsSpace(r) { + l.start = l.pos + continue + } + + switch r { + case '=': + return l.emit(TokenEqual) + case '{': + return l.emit(TokenLBrace) + case '}': + return l.emit(TokenRBrace) + case '"': + return l.lexString() + case '/': + return l.lexComment() + case '#': + return l.lexPackage() + case '!': + // Might be part of pragma //! + // But grammar says pragma is //! + // So it should start with // + case '+': + fallthrough + case '$': + return l.lexObjectIdentifier() + } + + if unicode.IsLetter(r) { + return l.lexIdentifier() + } + + if unicode.IsDigit(r) || r == '-' { + return l.lexNumber() + } + + return l.emit(TokenError) + } +} + +func (l *Lexer) lexIdentifier() Token { + for { + r := l.next() + if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' { + continue + } + l.backup() + val := l.input[l.start:l.pos] + if val == "true" || val == "false" { + return l.emit(TokenBool) + } + return l.emit(TokenIdentifier) + } +} + +func (l *Lexer) lexObjectIdentifier() Token { + for { + r := l.next() + if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' { + continue + } + l.backup() + return l.emit(TokenObjectIdentifier) + } +} + +func (l *Lexer) lexString() Token { + for { + r := l.next() + if r == '"' { + return l.emit(TokenString) + } + if r == -1 { + return l.emit(TokenError) + } + } +} + +func (l *Lexer) lexNumber() Token { + // Simple number lexing, could be improved for hex, binary, float + for { + r := l.next() + if unicode.IsDigit(r) || r == '.' 
|| r == 'x' || r == 'b' || r == 'e' || r == '-' { + continue + } + l.backup() + return l.emit(TokenNumber) + } +} + +func (l *Lexer) lexComment() Token { + r := l.next() + if r == '/' { + // It's a comment, docstring or pragma + r = l.next() + if r == '#' { + return l.lexUntilNewline(TokenDocstring) + } + if r == '!' { + return l.lexUntilNewline(TokenPragma) + } + return l.lexUntilNewline(TokenComment) + } + l.backup() + return l.emit(TokenError) +} + +func (l *Lexer) lexUntilNewline(t TokenType) Token { + for { + r := l.next() + if r == '\n' || r == -1 { + return l.emit(t) + } + } +} + +func (l *Lexer) lexPackage() Token { + // #package + l.start = l.pos - 1 // Include '#' + for { + r := l.next() + if unicode.IsLetter(r) { + continue + } + l.backup() + break + } + if l.input[l.start:l.pos] == "#package" { + return l.lexUntilNewline(TokenPackage) + } + return l.emit(TokenError) +} diff --git a/internal/parser/parser.go b/internal/parser/parser.go new file mode 100644 index 0000000..35e0bef --- /dev/null +++ b/internal/parser/parser.go @@ -0,0 +1,176 @@ +package parser + +import ( + "fmt" + "strconv" + "strings" +) + +type Parser struct { + lexer *Lexer + tok Token + peeked bool +} + +func NewParser(input string) *Parser { + return &Parser{ + lexer: NewLexer(input), + } +} + +func (p *Parser) next() Token { + if p.peeked { + p.peeked = false + return p.tok + } + p.tok = p.lexer.NextToken() + return p.tok +} + +func (p *Parser) peek() Token { + if p.peeked { + return p.tok + } + p.tok = p.lexer.NextToken() + p.peeked = true + return p.tok +} + +func (p *Parser) Parse() (*Configuration, error) { + config := &Configuration{} + for { + tok := p.peek() + if tok.Type == TokenEOF { + break + } + if tok.Type == TokenPackage { + p.next() + config.Package = &Package{ + Position: tok.Position, + URI: strings.TrimSpace(strings.TrimPrefix(tok.Value, "#package")), + } + continue + } + + // Skip comments, pragmas, docstrings for now in AST + if tok.Type == TokenComment || tok.Type 
== TokenDocstring || tok.Type == TokenPragma { + p.next() + continue + } + + def, err := p.parseDefinition() + if err != nil { + return nil, err + } + config.Definitions = append(config.Definitions, def) + } + return config, nil +} + +func (p *Parser) parseDefinition() (Definition, error) { + tok := p.next() + switch tok.Type { + case TokenIdentifier: + // field = value + name := tok.Value + if p.next().Type != TokenEqual { + return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column) + } + val, err := p.parseValue() + if err != nil { + return nil, err + } + return &Field{ + Position: tok.Position, + Name: name, + Value: val, + }, nil + case TokenObjectIdentifier: + // node = subnode + name := tok.Value + if p.next().Type != TokenEqual { + return nil, fmt.Errorf("%d:%d: expected =", p.tok.Position.Line, p.tok.Position.Column) + } + sub, err := p.parseSubnode() + if err != nil { + return nil, err + } + return &ObjectNode{ + Position: tok.Position, + Name: name, + Subnode: sub, + }, nil + default: + return nil, fmt.Errorf("%d:%d: unexpected token %v", tok.Position.Line, tok.Position.Column, tok.Value) + } +} + +func (p *Parser) parseSubnode() (Subnode, error) { + tok := p.next() + if tok.Type != TokenLBrace { + return Subnode{}, fmt.Errorf("%d:%d: expected {", tok.Position.Line, tok.Position.Column) + } + sub := Subnode{Position: tok.Position} + for { + t := p.peek() + if t.Type == TokenRBrace { + p.next() + break + } + if t.Type == TokenEOF { + return sub, fmt.Errorf("%d:%d: unexpected EOF, expected }", t.Position.Line, t.Position.Column) + } + if t.Type == TokenComment || t.Type == TokenDocstring || t.Type == TokenPragma { + p.next() + continue + } + def, err := p.parseDefinition() + if err != nil { + return sub, err + } + sub.Definitions = append(sub.Definitions, def) + } + return sub, nil +} + +func (p *Parser) parseValue() (Value, error) { + tok := p.next() + switch tok.Type { + case TokenString: + return &StringValue{ + Position: 
tok.Position, + Value: strings.Trim(tok.Value, "\""), + }, nil + case TokenNumber: + // Simplistic handling + if strings.Contains(tok.Value, ".") || strings.Contains(tok.Value, "e") { + f, _ := strconv.ParseFloat(tok.Value, 64) + return &FloatValue{Position: tok.Position, Value: f, Raw: tok.Value}, nil + } + i, _ := strconv.ParseInt(tok.Value, 0, 64) + return &IntValue{Position: tok.Position, Value: i, Raw: tok.Value}, nil + case TokenBool: + return &BoolValue{Position: tok.Position, Value: tok.Value == "true"}, nil + case TokenIdentifier: + // reference? + return &ReferenceValue{Position: tok.Position, Value: tok.Value}, nil + case TokenLBrace: + // array + arr := &ArrayValue{Position: tok.Position} + for { + t := p.peek() + if t.Type == TokenRBrace { + p.next() + break + } + val, err := p.parseValue() + if err != nil { + return nil, err + } + arr.Elements = append(arr.Elements, val) + } + return arr, nil + default: + return nil, fmt.Errorf("%d:%d: unexpected value token %v", tok.Position.Line, tok.Position.Column, tok.Value) + } +} diff --git a/internal/parser/parser_test.go b/internal/parser/parser_test.go new file mode 100644 index 0000000..83d6833 --- /dev/null +++ b/internal/parser/parser_test.go @@ -0,0 +1,38 @@ +package parser + +import ( + "testing" +) + +func TestParseBasic(t *testing.T) { + input := ` +#package PROJECT.SUB +// comment ++Node1 = { + Class = MyClass + Field1 = "value" + Field2 = 123 + Field3 = true + +SubNode = { + Class = OtherClass + } +} +$Node2 = { + Class = AppClass + Array = {1 2 3} +} +` + p := NewParser(input) + config, err := p.Parse() + if err != nil { + t.Fatalf("Parse error: %v", err) + } + + if config.Package == nil || config.Package.URI != "PROJECT.SUB" { + t.Errorf("Expected package PROJECT.SUB, got %v", config.Package) + } + + if len(config.Definitions) != 2 { + t.Errorf("Expected 2 definitions, got %d", len(config.Definitions)) + } +} diff --git a/internal/validator/validator.go b/internal/validator/validator.go new file 
mode 100644 index 0000000..204220d --- /dev/null +++ b/internal/validator/validator.go @@ -0,0 +1,95 @@ +package validator + +import ( + "fmt" + "github.com/marte-dev/marte-dev-tools/internal/index" + "github.com/marte-dev/marte-dev-tools/internal/parser" +) + +type DiagnosticLevel int + +const ( + LevelError DiagnosticLevel = iota + LevelWarning +) + +type Diagnostic struct { + Level DiagnosticLevel + Message string + Position parser.Position + File string +} + +type Validator struct { + Diagnostics []Diagnostic + Index *index.Index +} + +func NewValidator(idx *index.Index) *Validator { + return &Validator{Index: idx} +} + +func (v *Validator) Validate(file string, config *parser.Configuration) { + for _, def := range config.Definitions { + v.validateDefinition(file, "", config, def) + } +} + +func (v *Validator) validateDefinition(file string, path string, config *parser.Configuration, def parser.Definition) { + switch d := def.(type) { + case *parser.ObjectNode: + name := d.Name + fullPath := name + if path != "" { + fullPath = path + "." 
+ name + } + + // Check for mandatory 'Class' field for +/$ nodes + if d.Name != "" && (d.Name[0] == '+' || d.Name[0] == '$') { + hasClass := false + for _, subDef := range d.Subnode.Definitions { + if f, ok := subDef.(*parser.Field); ok && f.Name == "Class" { + hasClass = true + break + } + } + if !hasClass { + v.Diagnostics = append(v.Diagnostics, Diagnostic{ + Level: LevelError, + Message: fmt.Sprintf("Node %s is an object and must contain a 'Class' field", d.Name), + Position: d.Position, + File: file, + }) + } + } + + // GAM specific validation + // (This is a placeholder, real logic would check if it's a GAM) + + for _, subDef := range d.Subnode.Definitions { + v.validateDefinition(file, fullPath, config, subDef) + } + } +} + +func (v *Validator) CheckUnused() { + if v.Index == nil { + return + } + + referencedSymbols := make(map[*index.Symbol]bool) + for _, ref := range v.Index.References { + if ref.Target != nil { + referencedSymbols[ref.Target] = true + } + } + + for _, sym := range v.Index.Symbols { + // Heuristic: if it's a GAM or Signal, check if referenced + // (Refining this later with proper class checks) + if !referencedSymbols[sym] { + // Logic to determine if it should be warned as unused + // e.g. if sym.Class is a GAM or if it's a signal in a DataSource + } + } +} diff --git a/mdt b/mdt new file mode 100755 index 0000000..9b4d6ab Binary files /dev/null and b/mdt differ diff --git a/test_error.marte b/test_error.marte new file mode 100644 index 0000000..6fe8521 --- /dev/null +++ b/test_error.marte @@ -0,0 +1,3 @@ ++NodeWithoutClass = { + Field = 1 +}