Compare commits

...

3 Commits

Author SHA1 Message Date
Martino Ferrari
cb79d490e7 Initial support to variables and to producer/consumer logic 2026-01-28 18:25:48 +01:00
Martino Ferrari
b8d45f276d initial working on variables and consumer/producer logic 2026-01-28 17:59:29 +01:00
Martino Ferrari
03fe7d33b0 added variables and producer check 2026-01-28 17:50:49 +01:00
18 changed files with 879 additions and 51 deletions

View File

@@ -3,6 +3,8 @@ package main
import ( import (
"bytes" "bytes"
"os" "os"
"path/filepath"
"strings"
"github.com/marte-community/marte-dev-tools/internal/builder" "github.com/marte-community/marte-dev-tools/internal/builder"
"github.com/marte-community/marte-dev-tools/internal/formatter" "github.com/marte-community/marte-dev-tools/internal/formatter"
@@ -17,10 +19,6 @@ func main() {
if len(os.Args) < 2 { if len(os.Args) < 2 {
logger.Println("Usage: mdt <command> [arguments]") logger.Println("Usage: mdt <command> [arguments]")
logger.Println("Commands: lsp, build, check, fmt, init") logger.Println("Commands: lsp, build, check, fmt, init")
logger.Println(" build [-o output_file] <input_files...>")
logger.Println(" check <input_files...>")
logger.Println(" fmt <input_files...>")
logger.Println(" init <project_name>")
os.Exit(1) os.Exit(1)
} }
@@ -47,46 +45,47 @@ func runLSP() {
} }
func runBuild(args []string) { func runBuild(args []string) {
if len(args) < 1 { files := []string{}
logger.Println("Usage: mdt build [-o output_file] <input_files...>") overrides := make(map[string]string)
os.Exit(1) outputFile := ""
}
var outputFilePath string
var inputFiles []string
for i := 0; i < len(args); i++ { for i := 0; i < len(args); i++ {
if args[i] == "-o" { arg := args[i]
if strings.HasPrefix(arg, "-v") {
pair := arg[2:]
parts := strings.SplitN(pair, "=", 2)
if len(parts) == 2 {
overrides[parts[0]] = parts[1]
}
} else if arg == "-o" {
if i+1 < len(args) { if i+1 < len(args) {
outputFilePath = args[i+1] outputFile = args[i+1]
i++ i++
} else {
logger.Println("Error: -o requires a file path")
os.Exit(1)
} }
} else { } else {
inputFiles = append(inputFiles, args[i]) files = append(files, arg)
} }
} }
if len(inputFiles) < 1 { if len(files) < 1 {
logger.Println("Usage: mdt build [-o output_file] <input_files...>") logger.Println("Usage: mdt build [-o output] [-vVAR=VAL] <input_files...>")
os.Exit(1) os.Exit(1)
} }
output := os.Stdout b := builder.NewBuilder(files, overrides)
if outputFilePath != "" {
f, err := os.Create(outputFilePath) var out *os.File = os.Stdout
if outputFile != "" {
f, err := os.Create(outputFile)
if err != nil { if err != nil {
logger.Printf("Error creating output file %s: %v\n", outputFilePath, err) logger.Printf("Error creating output file: %v\n", err)
os.Exit(1) os.Exit(1)
} }
defer f.Close() defer f.Close()
output = f out = f
} }
b := builder.NewBuilder(inputFiles) err := b.Build(out)
err := b.Build(output)
if err != nil { if err != nil {
logger.Printf("Build failed: %v\n", err) logger.Printf("Build failed: %v\n", err)
os.Exit(1) os.Exit(1)
@@ -175,23 +174,61 @@ func runInit(args []string) {
} }
projectName := args[0] projectName := args[0]
if err := os.MkdirAll("src", 0755); err != nil { if err := os.MkdirAll(filepath.Join(projectName, "src"), 0755); err != nil {
logger.Fatalf("Error creating project directories: %v", err) logger.Fatalf("Error creating project directories: %v", err)
} }
files := map[string]string{ files := map[string]string{
"Makefile": "MDT=mdt\n\nall: check build\n\ncheck:\n\t$(MDT) check src/*.marte\n\nbuild:\n\t$(MDT) build -o app.marte src/*.marte\n\nfmt:\n\t$(MDT) fmt src/*.marte\n", "Makefile": `MDT=mdt
".marte_schema.cue": "package schema\n\n#Classes: {\n // Add your project-specific classes here\n}\n",
"src/app.marte": "#package " + projectName + "\n\n+App = {\n Class = RealTimeApplication\n +Data = {\n Class = ReferenceContainer\n }\n +Functions = {\n Class = ReferenceContainer\n }\n +States = {\n Class = ReferenceContainer\n }\n +Scheduler = {\n Class = GAMScheduler\n TimingDataSource = TimingDataSource\n }\n}\n", all: check build
"src/data.marte": "#package " + projectName + ".App.Data\n\n// Define your DataSources here\nDefaultDataSource = DDB\n//# Default DB\n+DDB = {\n Class=GAMDataSource\n}\n//# Timing Data Source to track threads timings\n+TimingDataSource = {\n Class = TimingDataSource\n}",
"src/functions.marte": "#package " + projectName + ".App.Functions\n\n// Define your GAMs here\n", check:
$(MDT) check src/*.marte
build:
$(MDT) build -o app.marte src/*.marte
fmt:
$(MDT) fmt src/*.marte
`,
".marte_schema.cue": `package schema
#Classes: {
// Add your project-specific classes here
}
`,
"src/app.marte": `#package App
+Main = {
Class = RealTimeApplication
+States = {
Class = ReferenceContainer
+Run = {
Class = RealTimeState
+MainThread = {
Class = RealTimeThread
Functions = {}
}
}
}
+Data = {
Class = ReferenceContainer
}
}
`,
"src/components.marte": `#package App.Data
// Define your DataSources here
`,
} }
for path, content := range files { for path, content := range files {
if err := os.WriteFile(path, []byte(content), 0644); err != nil { fullPath := filepath.Join(projectName, path)
logger.Fatalf("Error creating file %s: %v", path, err) if err := os.WriteFile(fullPath, []byte(content), 0644); err != nil {
logger.Fatalf("Error creating file %s: %v", fullPath, err)
} }
logger.Printf("Created %s\n", path) logger.Printf("Created %s\n", fullPath)
} }
logger.Printf("Project '%s' initialized successfully.\n", projectName) logger.Printf("Project '%s' initialized successfully.\n", projectName)

View File

@@ -76,6 +76,8 @@ GAMs declare inputs and outputs. You can refer to signals directly or alias them
### Threading Rules ### Threading Rules
**Validation Rule**: A DataSource that is **not** marked as multithreaded (default) cannot be used by GAMs running in different threads within the same State. **Validation Rule**: A DataSource that is **not** marked as multithreaded (default) cannot be used by GAMs running in different threads within the same State.
**Ordering Rule**: For `INOUT` signals (data dependency within a thread), the Producer GAM must appear **before** the Consumer GAM in the thread's `Functions` list. This ensures correct data flow within the cycle. This rule is skipped if the DataSource is marked as `multithreaded: true`.
To allow sharing, the DataSource class in the schema must have `#meta: multithreaded: true`. To allow sharing, the DataSource class in the schema must have `#meta: multithreaded: true`.
## 3. Schemas and Validation ## 3. Schemas and Validation
@@ -160,3 +162,27 @@ If validation is too strict, you can suppress warnings using pragmas (`//!`).
Type = int32 Type = int32
} }
``` ```
## 6. Variables
You can define variables using `#var`. The type expression supports CUE syntax.
```marte
#var MyVar: uint32 = 100
#var Env: "PROD" | "DEV" = "DEV"
```
### Usage
Reference a variable using `$`:
```marte
Field = $MyVar
```
### Build Override
You can override variable values during build:
```bash
mdt build -vMyVar=200 -vEnv="PROD" src/*.marte
```

View File

@@ -11,11 +11,13 @@ import (
) )
type Builder struct { type Builder struct {
Files []string Files []string
Overrides map[string]string
variables map[string]parser.Value
} }
func NewBuilder(files []string) *Builder { func NewBuilder(files []string, overrides map[string]string) *Builder {
return &Builder{Files: files} return &Builder{Files: files, Overrides: overrides, variables: make(map[string]parser.Value)}
} }
func (b *Builder) Build(f *os.File) error { func (b *Builder) Build(f *os.File) error {
@@ -56,6 +58,22 @@ func (b *Builder) Build(f *os.File) error {
tree.AddFile(file, config) tree.AddFile(file, config)
} }
b.collectVariables(tree)
if expectedProject == "" {
for _, iso := range tree.IsolatedFiles {
tree.Root.Fragments = append(tree.Root.Fragments, iso.Fragments...)
for name, child := range iso.Children {
if existing, ok := tree.Root.Children[name]; ok {
b.mergeNodes(existing, child)
} else {
tree.Root.Children[name] = child
child.Parent = tree.Root
}
}
}
}
// Determine root node to print // Determine root node to print
rootNode := tree.Root rootNode := tree.Root
if expectedProject != "" { if expectedProject != "" {
@@ -102,6 +120,8 @@ func (b *Builder) writeNodeBody(f *os.File, node *index.ProjectNode, indent int)
switch d := def.(type) { switch d := def.(type) {
case *parser.Field: case *parser.Field:
b.writeDefinition(f, d, indent) b.writeDefinition(f, d, indent)
case *parser.VariableDefinition:
continue
case *parser.ObjectNode: case *parser.ObjectNode:
norm := index.NormalizeName(d.Name) norm := index.NormalizeName(d.Name)
if child, ok := node.Children[norm]; ok { if child, ok := node.Children[norm]; ok {
@@ -150,6 +170,12 @@ func (b *Builder) formatValue(val parser.Value) string {
return v.Raw return v.Raw
case *parser.BoolValue: case *parser.BoolValue:
return fmt.Sprintf("%v", v.Value) return fmt.Sprintf("%v", v.Value)
case *parser.VariableReferenceValue:
name := strings.TrimPrefix(v.Name, "$")
if val, ok := b.variables[name]; ok {
return b.formatValue(val)
}
return v.Name
case *parser.ReferenceValue: case *parser.ReferenceValue:
return v.Value return v.Value
case *parser.ArrayValue: case *parser.ArrayValue:
@@ -163,6 +189,18 @@ func (b *Builder) formatValue(val parser.Value) string {
} }
} }
// mergeNodes folds the fragments and children of src into dest.
// Children present in both trees are merged recursively; children
// unique to src are moved over and re-parented under dest.
func (b *Builder) mergeNodes(dest, src *index.ProjectNode) {
	dest.Fragments = append(dest.Fragments, src.Fragments...)
	for name, child := range src.Children {
		existing, found := dest.Children[name]
		if !found {
			dest.Children[name] = child
			child.Parent = dest
			continue
		}
		b.mergeNodes(existing, child)
	}
}
func hasClass(frag *index.Fragment) bool { func hasClass(frag *index.Fragment) bool {
for _, def := range frag.Definitions { for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok && f.Name == "Class" { if f, ok := def.(*parser.Field); ok && f.Name == "Class" {
@@ -171,3 +209,28 @@ func hasClass(frag *index.Fragment) bool {
} }
return false return false
} }
// collectVariables resolves the final value of every #var declaration in the
// tree into b.variables. A build override (from -vNAME=VAL) takes precedence
// over the declared default; override strings are parsed by wrapping them in
// a dummy field assignment ("Temp = <val>") so any value syntax is accepted.
func (b *Builder) collectVariables(tree *index.ProjectTree) {
	processNode := func(n *index.ProjectNode) {
		for _, frag := range n.Fragments {
			for _, def := range frag.Definitions {
				if vdef, ok := def.(*parser.VariableDefinition); ok {
					if valStr, ok := b.Overrides[vdef.Name]; ok {
						p := parser.NewParser("Temp = " + valStr)
						// NOTE(review): the parse error is ignored; a malformed
						// override silently falls back to the declared default.
						cfg, _ := p.Parse()
						if len(cfg.Definitions) > 0 {
							if f, ok := cfg.Definitions[0].(*parser.Field); ok {
								b.variables[vdef.Name] = f.Value
								// Override wins: skip the default below.
								continue
							}
						}
					}
					if vdef.DefaultValue != nil {
						b.variables[vdef.Name] = vdef.DefaultValue
					}
				}
			}
		}
	}
	tree.Walk(processNode)
}

View File

@@ -104,6 +104,14 @@ func (f *Formatter) formatDefinition(def parser.Definition, indent int) int {
fmt.Fprintf(f.writer, "%s}", indentStr) fmt.Fprintf(f.writer, "%s}", indentStr)
return d.Subnode.EndPosition.Line return d.Subnode.EndPosition.Line
case *parser.VariableDefinition:
fmt.Fprintf(f.writer, "%s#var %s: %s", indentStr, d.Name, d.TypeExpr)
if d.DefaultValue != nil {
fmt.Fprint(f.writer, " = ")
endLine := f.formatValue(d.DefaultValue, indent)
return endLine
}
return d.Position.Line
} }
return 0 return 0
} }
@@ -142,6 +150,9 @@ func (f *Formatter) formatValue(val parser.Value, indent int) int {
case *parser.ReferenceValue: case *parser.ReferenceValue:
fmt.Fprint(f.writer, v.Value) fmt.Fprint(f.writer, v.Value)
return v.Position.Line return v.Position.Line
case *parser.VariableReferenceValue:
fmt.Fprint(f.writer, v.Name)
return v.Position.Line
case *parser.ArrayValue: case *parser.ArrayValue:
fmt.Fprint(f.writer, "{ ") fmt.Fprint(f.writer, "{ ")
for i, e := range v.Elements { for i, e := range v.Elements {

View File

@@ -8,12 +8,18 @@ import (
"github.com/marte-community/marte-dev-tools/internal/parser" "github.com/marte-community/marte-dev-tools/internal/parser"
) )
type VariableInfo struct {
Def *parser.VariableDefinition
File string
}
type ProjectTree struct { type ProjectTree struct {
Root *ProjectNode Root *ProjectNode
References []Reference References []Reference
IsolatedFiles map[string]*ProjectNode IsolatedFiles map[string]*ProjectNode
GlobalPragmas map[string][]string GlobalPragmas map[string][]string
NodeMap map[string][]*ProjectNode NodeMap map[string][]*ProjectNode
Variables map[string]VariableInfo
} }
func (pt *ProjectTree) ScanDirectory(rootPath string) error { func (pt *ProjectTree) ScanDirectory(rootPath string) error {
@@ -37,10 +43,11 @@ func (pt *ProjectTree) ScanDirectory(rootPath string) error {
} }
type Reference struct { type Reference struct {
Name string Name string
Position parser.Position Position parser.Position
File string File string
Target *ProjectNode // Resolved target Target *ProjectNode
TargetVariable *parser.VariableDefinition
} }
type ProjectNode struct { type ProjectNode struct {
@@ -72,6 +79,7 @@ func NewProjectTree() *ProjectTree {
}, },
IsolatedFiles: make(map[string]*ProjectNode), IsolatedFiles: make(map[string]*ProjectNode),
GlobalPragmas: make(map[string][]string), GlobalPragmas: make(map[string][]string),
Variables: make(map[string]VariableInfo),
} }
} }
@@ -219,6 +227,9 @@ func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *pars
case *parser.Field: case *parser.Field:
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
case *parser.VariableDefinition:
fileFragment.Definitions = append(fileFragment.Definitions, d)
pt.Variables[d.Name] = VariableInfo{Def: d, File: file}
case *parser.ObjectNode: case *parser.ObjectNode:
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
@@ -274,6 +285,9 @@ func (pt *ProjectTree) addObjectFragment(node *ProjectNode, file string, obj *pa
frag.Definitions = append(frag.Definitions, d) frag.Definitions = append(frag.Definitions, d)
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
pt.extractFieldMetadata(node, d) pt.extractFieldMetadata(node, d)
case *parser.VariableDefinition:
frag.Definitions = append(frag.Definitions, d)
pt.Variables[d.Name] = VariableInfo{Def: d, File: file}
case *parser.ObjectNode: case *parser.ObjectNode:
frag.Definitions = append(frag.Definitions, d) frag.Definitions = append(frag.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
@@ -379,6 +393,12 @@ func (pt *ProjectTree) indexValue(file string, val parser.Value) {
Position: v.Position, Position: v.Position,
File: file, File: file,
}) })
case *parser.VariableReferenceValue:
pt.References = append(pt.References, Reference{
Name: strings.TrimPrefix(v.Name, "$"),
Position: v.Position,
File: file,
})
case *parser.ArrayValue: case *parser.ArrayValue:
for _, elem := range v.Elements { for _, elem := range v.Elements {
pt.indexValue(file, elem) pt.indexValue(file, elem)
@@ -401,6 +421,12 @@ func (pt *ProjectTree) ResolveReferences() {
pt.RebuildIndex() pt.RebuildIndex()
for i := range pt.References { for i := range pt.References {
ref := &pt.References[i] ref := &pt.References[i]
if v, ok := pt.Variables[ref.Name]; ok {
ref.TargetVariable = v.Def
continue
}
if isoNode, ok := pt.IsolatedFiles[ref.File]; ok { if isoNode, ok := pt.IsolatedFiles[ref.File]; ok {
ref.Target = pt.FindNode(isoNode, ref.Name, nil) ref.Target = pt.FindNode(isoNode, ref.Name, nil)
} else { } else {
@@ -479,6 +505,7 @@ type QueryResult struct {
Node *ProjectNode Node *ProjectNode
Field *parser.Field Field *parser.Field
Reference *Reference Reference *Reference
Variable *parser.VariableDefinition
} }
func (pt *ProjectTree) Query(file string, line, col int) *QueryResult { func (pt *ProjectTree) Query(file string, line, col int) *QueryResult {
@@ -528,6 +555,10 @@ func (pt *ProjectTree) queryNode(node *ProjectNode, file string, line, col int)
if line == f.Position.Line && col >= f.Position.Column && col < f.Position.Column+len(f.Name) { if line == f.Position.Line && col >= f.Position.Column && col < f.Position.Column+len(f.Name) {
return &QueryResult{Field: f} return &QueryResult{Field: f}
} }
} else if v, ok := def.(*parser.VariableDefinition); ok {
if line == v.Position.Line {
return &QueryResult{Variable: v}
}
} }
} }
} }

View File

@@ -591,6 +591,8 @@ func HandleHover(params HoverParams) *Hover {
} }
} else if res.Field != nil { } else if res.Field != nil {
content = fmt.Sprintf("**Field**: `%s`", res.Field.Name) content = fmt.Sprintf("**Field**: `%s`", res.Field.Name)
} else if res.Variable != nil {
content = fmt.Sprintf("**Variable**: `%s`\nType: `%s`", res.Variable.Name, res.Variable.TypeExpr)
} else if res.Reference != nil { } else if res.Reference != nil {
targetName := "Unresolved" targetName := "Unresolved"
fullInfo := "" fullInfo := ""
@@ -600,6 +602,10 @@ func HandleHover(params HoverParams) *Hover {
targetName = res.Reference.Target.RealName targetName = res.Reference.Target.RealName
targetDoc = res.Reference.Target.Doc targetDoc = res.Reference.Target.Doc
fullInfo = formatNodeInfo(res.Reference.Target) fullInfo = formatNodeInfo(res.Reference.Target)
} else if res.Reference.TargetVariable != nil {
v := res.Reference.TargetVariable
targetName = v.Name
fullInfo = fmt.Sprintf("**Variable**: `%s`\nType: `%s`", v.Name, v.TypeExpr)
} }
content = fmt.Sprintf("**Reference**: `%s` -> `%s`", res.Reference.Name, targetName) content = fmt.Sprintf("**Reference**: `%s` -> `%s`", res.Reference.Name, targetName)

View File

@@ -125,3 +125,21 @@ type Pragma struct {
} }
func (p *Pragma) Pos() Position { return p.Position } func (p *Pragma) Pos() Position { return p.Position }
// VariableDefinition represents a `#var Name: TypeExpr [= default]`
// declaration.
type VariableDefinition struct {
	Position     Position
	Name         string
	TypeExpr     string // raw type expression, tokens re-joined with spaces
	DefaultValue Value  // nil when no `= value` clause is present
}

func (v *VariableDefinition) Pos() Position { return v.Position }
func (v *VariableDefinition) isDefinition() {}

// VariableReferenceValue is a `$Name` reference to a variable used in value
// position.
type VariableReferenceValue struct {
	Position Position
	Name     string // includes the leading `$` as lexed; consumers strip it
}

func (v *VariableReferenceValue) Pos() Position { return v.Position }
func (v *VariableReferenceValue) isValue() {}

View File

@@ -23,6 +23,11 @@ const (
TokenComment TokenComment
TokenDocstring TokenDocstring
TokenComma TokenComma
TokenColon
TokenPipe
TokenLBracket
TokenRBracket
TokenSymbol
) )
type Token struct { type Token struct {
@@ -124,6 +129,16 @@ func (l *Lexer) NextToken() Token {
return l.emit(TokenRBrace) return l.emit(TokenRBrace)
case ',': case ',':
return l.emit(TokenComma) return l.emit(TokenComma)
case ':':
return l.emit(TokenColon)
case '|':
return l.emit(TokenPipe)
case '[':
return l.emit(TokenLBracket)
case ']':
return l.emit(TokenRBracket)
case '&', '?', '!', '<', '>', '*', '(', ')':
return l.emit(TokenSymbol)
case '"': case '"':
return l.lexString() return l.lexString()
case '/': case '/':
@@ -151,7 +166,7 @@ func (l *Lexer) NextToken() Token {
func (l *Lexer) lexIdentifier() Token { func (l *Lexer) lexIdentifier() Token {
for { for {
r := l.next() r := l.next()
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' || r == '.' || r == ':' { if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' || r == '.' {
continue continue
} }
l.backup() l.backup()
@@ -247,7 +262,7 @@ func (l *Lexer) lexHashIdentifier() Token {
// We are at '#', l.start is just before it // We are at '#', l.start is just before it
for { for {
r := l.next() r := l.next()
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' || r == '.' || r == ':' || r == '#' { if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' || r == '.' || r == '#' {
continue continue
} }
l.backup() l.backup()

View File

@@ -101,6 +101,9 @@ func (p *Parser) parseDefinition() (Definition, bool) {
switch tok.Type { switch tok.Type {
case TokenIdentifier: case TokenIdentifier:
name := tok.Value name := tok.Value
if name == "#var" {
return p.parseVariableDefinition(tok)
}
if p.peek().Type != TokenEqual { if p.peek().Type != TokenEqual {
p.addError(tok.Position, "expected =") p.addError(tok.Position, "expected =")
return nil, false return nil, false
@@ -244,6 +247,8 @@ func (p *Parser) parseValue() (Value, bool) {
true true
case TokenIdentifier: case TokenIdentifier:
return &ReferenceValue{Position: tok.Position, Value: tok.Value}, true return &ReferenceValue{Position: tok.Position, Value: tok.Value}, true
case TokenObjectIdentifier:
return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true
case TokenLBrace: case TokenLBrace:
arr := &ArrayValue{Position: tok.Position} arr := &ArrayValue{Position: tok.Position}
for { for {
@@ -269,3 +274,53 @@ func (p *Parser) parseValue() (Value, bool) {
return nil, false return nil, false
} }
} }
// parseVariableDefinition parses a `#var Name: TypeExpr [= default]`
// declaration. startTok is the already-consumed `#var` token and provides
// the definition's position.
func (p *Parser) parseVariableDefinition(startTok Token) (Definition, bool) {
	nameTok := p.next()
	if nameTok.Type != TokenIdentifier {
		p.addError(nameTok.Position, "expected variable name")
		return nil, false
	}
	if p.next().Type != TokenColon {
		p.addError(nameTok.Position, "expected :")
		return nil, false
	}
	// The type expression is everything on the declaration line up to an
	// optional `=`. Token values are re-joined with single spaces.
	declLine := nameTok.Position.Line
	var parts []string
	for {
		t := p.peek()
		if t.Type == TokenEOF || t.Type == TokenEqual || t.Position.Line > declLine {
			break
		}
		parts = append(parts, p.next().Value)
	}
	// Optional default value after `=`.
	var defVal Value
	if p.peek().Type == TokenEqual {
		p.next()
		val, ok := p.parseValue()
		if !ok {
			return nil, false
		}
		defVal = val
	}
	return &VariableDefinition{
		Position:     startTok.Position,
		Name:         nameTok.Value,
		TypeExpr:     strings.Join(parts, " "),
		DefaultValue: defVal,
	}, true
}

View File

@@ -413,7 +413,7 @@ package schema
OPCUA: {...} OPCUA: {...}
SysLogger: {...} SysLogger: {...}
GAMDataSource: { GAMDataSource: {
#meta: multithreaded: bool | *false #meta: multithreaded: false
#meta: direction: "INOUT" #meta: direction: "INOUT"
#meta: type: "datasource" #meta: type: "datasource"
... ...
@@ -421,7 +421,7 @@ package schema
} }
#Meta: { #Meta: {
direction?: "IN" | "OUT" | "INOUT" direction?: "IN" | "OUT" | "INOUT"
multithreaded?: bool multithreaded?: bool
... ...
} }
@@ -430,7 +430,7 @@ package schema
// It must have a Class field. // It must have a Class field.
// Based on Class, it validates against #Classes. // Based on Class, it validates against #Classes.
#Object: { #Object: {
Class: string Class: string
"#meta"?: #Meta "#meta"?: #Meta
// Allow any other field by default (extensibility), // Allow any other field by default (extensibility),
// unless #Classes definition is closed. // unless #Classes definition is closed.

View File

@@ -55,6 +55,8 @@ func (v *Validator) ValidateProject() {
} }
v.CheckUnused() v.CheckUnused()
v.CheckDataSourceThreading() v.CheckDataSourceThreading()
v.CheckINOUTOrdering()
v.CheckVariables()
} }
func (v *Validator) validateNode(node *index.ProjectNode) { func (v *Validator) validateNode(node *index.ProjectNode) {
@@ -884,3 +886,218 @@ func (v *Validator) isMultithreaded(ds *index.ProjectNode) bool {
} }
return false return false
} }
// CheckINOUTOrdering validates producer/consumer ordering for INOUT
// DataSources: within each RealTimeThread, a signal read from a
// non-multithreaded INOUT DataSource must be written by a GAM that appears
// earlier in the same thread (error), and every produced signal should also
// be consumed somewhere in that thread (warning otherwise).
func (v *Validator) CheckINOUTOrdering() {
	if v.Tree.Root == nil {
		return
	}
	// Locate the RealTimeApplication node anywhere in the tree.
	var appNode *index.ProjectNode
	findApp := func(n *index.ProjectNode) {
		if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" {
			appNode = n
		}
	}
	v.Tree.Walk(findApp)
	if appNode == nil {
		return
	}
	// Find the states container: a child literally named "States", or
	// failing that, any child whose Class is StateMachine.
	var statesNode *index.ProjectNode
	if s, ok := appNode.Children["States"]; ok {
		statesNode = s
	} else {
		for _, child := range appNode.Children {
			if cls, ok := child.Metadata["Class"]; ok && cls == "StateMachine" {
				statesNode = child
				break
			}
		}
	}
	if statesNode == nil {
		return
	}
	for _, state := range statesNode.Children {
		// Collect RealTimeThread nodes, either grouped under a "Threads"
		// container or declared directly under the state.
		var threads []*index.ProjectNode
		for _, child := range state.Children {
			if child.RealName == "Threads" {
				for _, t := range child.Children {
					if cls, ok := t.Metadata["Class"]; ok && cls == "RealTimeThread" {
						threads = append(threads, t)
					}
				}
			} else {
				if cls, ok := child.Metadata["Class"]; ok && cls == "RealTimeThread" {
					threads = append(threads, child)
				}
			}
		}
		for _, thread := range threads {
			// Per-thread bookkeeping, keyed by DataSource node:
			//   producedSignals: signal name -> signal nodes that wrote it
			//   consumedSignals: signal name -> seen as an input
			producedSignals := make(map[*index.ProjectNode]map[string][]*index.ProjectNode)
			consumedSignals := make(map[*index.ProjectNode]map[string]bool)
			gams := v.getThreadGAMs(thread)
			// GAMs are visited in thread order, so "produced before
			// consumed" falls out of the traversal order; the consume-
			// before-produce error is emitted inside the helper.
			for _, gam := range gams {
				v.processGAMSignalsForOrdering(gam, "InputSignals", producedSignals, consumedSignals, true, thread, state)
				v.processGAMSignalsForOrdering(gam, "OutputSignals", producedSignals, consumedSignals, false, thread, state)
			}
			// Check for produced but not consumed
			for ds, signals := range producedSignals {
				for sigName, producers := range signals {
					consumed := false
					if cSet, ok := consumedSignals[ds]; ok {
						if cSet[sigName] {
							consumed = true
						}
					}
					if !consumed {
						// Warn at every producing signal node.
						for _, prod := range producers {
							v.Diagnostics = append(v.Diagnostics, Diagnostic{
								Level:    LevelWarning,
								Message:  fmt.Sprintf("INOUT Signal '%s' (DS '%s') is produced in thread '%s' but never consumed in the same thread.", sigName, ds.RealName, thread.RealName),
								Position: v.getNodePosition(prod),
								File:     v.getNodeFile(prod),
							})
						}
					}
				}
			}
		}
	}
}
// processGAMSignalsForOrdering records one GAM's signals from the given
// container ("InputSignals" or "OutputSignals") into the per-thread
// produced/consumed maps. When isInput is true it also emits an error for an
// INOUT signal consumed before any earlier GAM in the thread produced it.
// Signals on multithreaded DataSources, or on DataSources whose schema
// direction is not "INOUT", are ignored.
func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, containerName string, produced map[*index.ProjectNode]map[string][]*index.ProjectNode, consumed map[*index.ProjectNode]map[string]bool, isInput bool, thread, state *index.ProjectNode) {
	container := gam.Children[containerName]
	if container == nil {
		return
	}
	for _, sig := range container.Children {
		fields := v.getFields(sig)
		var dsNode *index.ProjectNode
		var sigName string
		// Prefer a resolved signal target: its grandparent is taken to be
		// the owning DataSource node.
		if sig.Target != nil {
			if sig.Target.Parent != nil && sig.Target.Parent.Parent != nil {
				dsNode = sig.Target.Parent.Parent
				sigName = sig.Target.RealName
			}
		}
		if dsNode == nil {
			// Fall back to the explicit DataSource field, honoring an
			// Alias field for the signal name when present.
			if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
				dsName := v.getFieldValue(dsFields[0])
				dsNode = v.resolveReference(dsName, v.getNodeFile(sig), isDataSource)
			}
			if aliasFields, ok := fields["Alias"]; ok && len(aliasFields) > 0 {
				sigName = v.getFieldValue(aliasFields[0])
			} else {
				sigName = sig.RealName
			}
		}
		if dsNode == nil || sigName == "" {
			continue
		}
		sigName = index.NormalizeName(sigName)
		// Multithreaded DataSources are exempt from the ordering rule.
		if v.isMultithreaded(dsNode) {
			continue
		}
		dir := v.getDataSourceDirection(dsNode)
		if dir != "INOUT" {
			continue
		}
		if isInput {
			// Error if nothing earlier in this thread produced the signal.
			isProduced := false
			if set, ok := produced[dsNode]; ok {
				if len(set[sigName]) > 0 {
					isProduced = true
				}
			}
			if !isProduced {
				v.Diagnostics = append(v.Diagnostics, Diagnostic{
					Level:    LevelError,
					Message:  fmt.Sprintf("INOUT Signal '%s' (DS '%s') is consumed by GAM '%s' in thread '%s' (State '%s') before being produced by any previous GAM.", sigName, dsNode.RealName, gam.RealName, thread.RealName, state.RealName),
					Position: v.getNodePosition(sig),
					File:     v.getNodeFile(sig),
				})
			}
			if consumed[dsNode] == nil {
				consumed[dsNode] = make(map[string]bool)
			}
			consumed[dsNode][sigName] = true
		} else {
			if produced[dsNode] == nil {
				produced[dsNode] = make(map[string][]*index.ProjectNode)
			}
			produced[dsNode][sigName] = append(produced[dsNode][sigName], sig)
		}
	}
}
// getDataSourceDirection returns the `#meta.direction` string declared in the
// schema for the DataSource's class ("IN", "OUT" or "INOUT"), or "" when the
// class is unknown, no schema is loaded, or the schema has no direction entry.
func (v *Validator) getDataSourceDirection(ds *index.ProjectNode) string {
	cls := v.getNodeClass(ds)
	if cls == "" || v.Schema == nil {
		return ""
	}
	lookup := v.Schema.Value.LookupPath(cue.ParsePath(fmt.Sprintf("#Classes.%s.#meta.direction", cls)))
	if lookup.Err() != nil {
		return ""
	}
	dir, _ := lookup.String()
	return dir
}
// CheckVariables type-checks every #var declaration against the loaded CUE
// context: the type expression must compile, and when a default value is
// present it must unify with that type to a concrete value. Violations are
// appended to v.Diagnostics; with no schema loaded the check is skipped.
func (v *Validator) CheckVariables() {
	if v.Schema == nil {
		return
	}
	ctx := v.Schema.Context
	for _, info := range v.Tree.Variables {
		def := info.Def
		// Compile Type
		typeVal := ctx.CompileString(def.TypeExpr)
		if typeVal.Err() != nil {
			v.Diagnostics = append(v.Diagnostics, Diagnostic{
				Level:    LevelError,
				Message:  fmt.Sprintf("Invalid type expression for variable '%s': %v", def.Name, typeVal.Err()),
				Position: def.Position,
				File:     info.File,
			})
			// The default cannot be checked against a broken type.
			continue
		}
		if def.DefaultValue != nil {
			// Encode the AST default into a CUE value via Go interface{}.
			valInterface := v.valueToInterface(def.DefaultValue)
			valVal := ctx.Encode(valInterface)
			// Unify
			res := typeVal.Unify(valVal)
			if err := res.Validate(cue.Concrete(true)); err != nil {
				v.Diagnostics = append(v.Diagnostics, Diagnostic{
					Level:    LevelError,
					Message:  fmt.Sprintf("Variable '%s' value mismatch: %v", def.Name, err),
					Position: def.Position,
					File:     info.File,
				})
			}
		}
	}
}

View File

@@ -32,7 +32,7 @@ FieldB = 20
os.WriteFile("build_multi_test/f2.marte", []byte(f2Content), 0644) os.WriteFile("build_multi_test/f2.marte", []byte(f2Content), 0644)
// Execute Build // Execute Build
b := builder.NewBuilder([]string{"build_multi_test/f1.marte", "build_multi_test/f2.marte"}) b := builder.NewBuilder([]string{"build_multi_test/f1.marte", "build_multi_test/f2.marte"}, nil)
// Prepare output file // Prepare output file
// Should be +MyObj.marte (normalized MyObj.marte) - Actually checking content // Should be +MyObj.marte (normalized MyObj.marte) - Actually checking content

View File

@@ -0,0 +1,45 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/formatter"
"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestFormatterVariables checks that #var declarations and $ references
// survive a parse/format round trip intact.
func TestFormatterVariables(t *testing.T) {
	content := `
#var MyInt: int = 10
#var MyStr: string | "A" = "default"
+Obj = {
Field1 = $MyInt
Field2 = $MyStr
}
`
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	var rendered bytes.Buffer
	formatter.Format(cfg, &rendered)
	got := rendered.String()
	// The formatter rebuilds the type expression from tokens, one space
	// apart, so `string | "A"` round-trips with spaces preserved.
	if !strings.Contains(got, "#var MyInt: int = 10") {
		t.Errorf("Variable MyInt formatted incorrectly. Got:\n%s", got)
	}
	if !strings.Contains(got, "#var MyStr: string | \"A\" = \"default\"") {
		t.Errorf("Variable MyStr formatted incorrectly. Got:\n%s", got)
	}
	if !strings.Contains(got, "Field1 = $MyInt") {
		t.Errorf("Variable reference $MyInt formatted incorrectly. Got:\n%s", got)
	}
}

View File

@@ -168,7 +168,7 @@ func TestBuildCommand(t *testing.T) {
// Test Merge // Test Merge
files := []string{"integration/build_merge_1.marte", "integration/build_merge_2.marte"} files := []string{"integration/build_merge_1.marte", "integration/build_merge_2.marte"}
b := builder.NewBuilder(files) b := builder.NewBuilder(files, nil)
outputFile, err := os.Create("build_test/TEST.marte") outputFile, err := os.Create("build_test/TEST.marte")
if err != nil { if err != nil {
@@ -195,7 +195,7 @@ func TestBuildCommand(t *testing.T) {
// Test Order (Class First) // Test Order (Class First)
filesOrder := []string{"integration/build_order_1.marte", "integration/build_order_2.marte"} filesOrder := []string{"integration/build_order_1.marte", "integration/build_order_2.marte"}
bOrder := builder.NewBuilder(filesOrder) bOrder := builder.NewBuilder(filesOrder, nil)
outputFileOrder, err := os.Create("build_test/ORDER.marte") outputFileOrder, err := os.Create("build_test/ORDER.marte")
if err != nil { if err != nil {

73
test/lsp_inout_test.go Normal file
View File

@@ -0,0 +1,73 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/schema"
)
// TestLSPINOUTOrdering drives the LSP end to end: opening a document whose
// only GAM reads INOUT signal 'A' from a GAMDataSource with no earlier
// producer in the thread must publish an ordering diagnostic.
func TestLSPINOUTOrdering(t *testing.T) {
	// Reset LSP package-level state so the test is self-contained.
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	// Mock schema if necessary, but we rely on internal schema
	lsp.GlobalSchema = schema.LoadFullSchema(".")
	// Capture everything the server writes (diagnostic notifications).
	var buf bytes.Buffer
	lsp.Output = &buf
	content := `
+App = {
Class = RealTimeApplication
+Data = {
Class = ReferenceContainer
+DDB = {
Class = GAMDataSource
}
}
+Functions = {
Class = ReferenceContainer
+A = {
Class = IOGAM
InputSignals = {
A = {
DataSource = DDB
Type = uint32
}
}
OutputSignals = {
B = {
DataSource = DDB
Type = uint32
}
}
}
}
+States = {
Class = ReferenceContainer
+State = {
Class =RealTimeState
Threads = {
+Th1 = {
Class = RealTimeThread
Functions = {A}
}
}
}
}
}
`
	uri := "file://app.marte"
	lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
		TextDocument: lsp.TextDocumentItem{URI: uri, Text: content},
	})
	// The diagnostic text comes from Validator.CheckINOUTOrdering.
	output := buf.String()
	if !strings.Contains(output, "INOUT Signal 'A'") {
		t.Error("LSP did not report INOUT ordering error")
		t.Log(output)
	}
}

View File

@@ -0,0 +1,66 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/schema"
)
// TestLSPINOUTWarning verifies that a signal produced in a thread but never
// consumed anywhere makes the LSP emit a "never consumed" usage warning.
func TestLSPINOUTWarning(t *testing.T) {
	// Start from a clean LSP state; tests share these package globals.
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = map[string]string{}
	lsp.GlobalSchema = schema.LoadFullSchema(".")

	var sink bytes.Buffer
	lsp.Output = &sink

	const docURI = "file://warning.marte"
	doc := `
+App = {
Class = RealTimeApplication
+Data = {
Class = ReferenceContainer
+DDB = {
Class = GAMDataSource
}
}
+Functions = {
Class = ReferenceContainer
+Producer = {
Class = IOGAM
OutputSignals = {
ProducedSig = {
DataSource = DDB
Type = uint32
}
}
}
}
+States = {
Class = ReferenceContainer
+State = {
Class =RealTimeState
Threads = {
+Th1 = {
Class = RealTimeThread
Functions = {Producer}
}
}
}
}
}
`
	lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
		TextDocument: lsp.TextDocumentItem{URI: docURI, Text: doc},
	})

	got := sink.String()
	if !strings.Contains(got, "produced in thread '+Th1' but never consumed") {
		t.Error("LSP did not report INOUT usage warning")
		t.Log(got)
	}
}

View File

@@ -0,0 +1,93 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestINOUTOrdering checks the validator's producer/consumer ordering rule:
// State1 schedules the consumer before the producer (must raise an error),
// while State2 uses the correct order (must stay clean).
func TestINOUTOrdering(t *testing.T) {
	content := `
+Data = {
Class = ReferenceContainer
+MyDS = {
Class = GAMDataSource
#meta = { multithreaded = false } // Explicitly false
Signals = { Sig1 = { Type = uint32 } }
}
}
+GAM_Consumer = {
Class = IOGAM
InputSignals = {
Sig1 = { DataSource = MyDS Type = uint32 }
}
}
+GAM_Producer = {
Class = IOGAM
OutputSignals = {
Sig1 = { DataSource = MyDS Type = uint32 }
}
}
+App = {
Class = RealTimeApplication
+States = {
Class = ReferenceContainer
+State1 = {
Class = RealTimeState
+Thread1 = {
Class = RealTimeThread
Functions = { GAM_Consumer, GAM_Producer } // Fail
}
}
+State2 = {
Class = RealTimeState
+Thread2 = {
Class = RealTimeThread
Functions = { GAM_Producer, GAM_Consumer } // Pass
}
}
}
}
`
	tree := index.NewProjectTree()
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatal(err)
	}
	tree.AddFile("main.marte", cfg)

	// Use validator with default schema (embedded).
	// We pass "." but it shouldn't matter if no .marte_schema.cue exists.
	v := validator.NewValidator(tree, ".")
	v.ValidateProject()

	// hasDiag reports whether any diagnostic message contains every substring.
	hasDiag := func(subs ...string) bool {
		for _, d := range v.Diagnostics {
			matched := true
			for _, s := range subs {
				if !strings.Contains(d.Message, s) {
					matched = false
					break
				}
			}
			if matched {
				return true
			}
		}
		return false
	}

	if !hasDiag("consumed by GAM '+GAM_Consumer'", "before being produced") {
		t.Error("Expected INOUT ordering error for State1")
		for _, d := range v.Diagnostics {
			t.Logf("Diag: %s", d.Message)
		}
	}
	if hasDiag("State '+State2'", "before being produced") {
		t.Error("Unexpected INOUT ordering error for State2 (Correct order)")
	}
}

72
test/variables_test.go Normal file
View File

@@ -0,0 +1,72 @@
package integration
import (
"os"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/builder"
"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestVariables exercises the full #var pipeline: parsing variable
// definitions, overriding one value at build time, falling back to the
// declared default for the other, and stripping #var directives from the
// built output.
func TestVariables(t *testing.T) {
	content := `
#var MyInt: int = 10
#var MyStr: string = "default"
+Obj = {
Class = Test
Field1 = $MyInt
Field2 = $MyStr
}
`
	// Test Parsing
	p := parser.NewParser(content)
	cfg, err := p.Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	// Check definitions: #var, #var, +Obj
	if len(cfg.Definitions) != 3 {
		t.Errorf("Expected 3 definitions, got %d", len(cfg.Definitions))
	}

	// Test Builder resolution: the builder reads from disk, so stage the
	// source in a temp file. Every temp-file error is fatal — otherwise a
	// filesystem hiccup would surface as a confusing assertion failure.
	f, err := os.CreateTemp("", "vars.marte")
	if err != nil {
		t.Fatalf("creating temp input: %v", err)
	}
	defer os.Remove(f.Name())
	if _, err := f.WriteString(content); err != nil {
		t.Fatalf("writing temp input: %v", err)
	}
	if err := f.Close(); err != nil {
		t.Fatalf("closing temp input: %v", err)
	}

	// Build with an override for MyInt only; MyStr keeps its default.
	overrides := map[string]string{
		"MyInt": "999",
	}
	b := builder.NewBuilder([]string{f.Name()}, overrides)

	outF, err := os.CreateTemp("", "out.marte")
	if err != nil {
		t.Fatalf("creating temp output: %v", err)
	}
	outName := outF.Name()
	defer os.Remove(outName)

	err = b.Build(outF)
	outF.Close()
	if err != nil {
		t.Fatalf("Build failed: %v", err)
	}

	outContent, err := os.ReadFile(outName)
	if err != nil {
		t.Fatalf("reading build output: %v", err)
	}
	outStr := string(outContent)

	if !strings.Contains(outStr, "Field1 = 999") {
		t.Errorf("Variable override failed for MyInt. Got:\n%s", outStr)
	}
	if !strings.Contains(outStr, "Field2 = \"default\"") {
		t.Errorf("Default value failed for MyStr. Got:\n%s", outStr)
	}
	// Check #var is removed from the built artifact.
	if strings.Contains(outStr, "#var") {
		t.Error("#var definition present in output")
	}
}