Compare commits
6 Commits
c3f4d8f465
...
f121f7c15d
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f121f7c15d | ||
|
|
b4d3edab9d | ||
|
|
ee9674a7bc | ||
|
|
d98593e67b | ||
|
|
a55c4b9c7c | ||
|
|
6fa67abcb4 |
2
Makefile
2
Makefile
@@ -10,7 +10,7 @@ build:
|
||||
go build -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/mdt
|
||||
|
||||
test:
|
||||
go test -v ./...
|
||||
go test -v ./test/...
|
||||
|
||||
coverage:
|
||||
go test -cover -coverprofile=coverage.out ./test/... -coverpkg=./internal/...
|
||||
|
||||
@@ -72,6 +72,45 @@ func runBuild(args []string) {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// 1. Run Validation
|
||||
tree := index.NewProjectTree()
|
||||
for _, file := range files {
|
||||
content, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
logger.Printf("Error reading %s: %v\n", file, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
p := parser.NewParser(string(content))
|
||||
config, err := p.Parse()
|
||||
if err != nil {
|
||||
logger.Printf("%s: Grammar error: %v\n", file, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
tree.AddFile(file, config)
|
||||
}
|
||||
|
||||
v := validator.NewValidator(tree, ".")
|
||||
v.ValidateProject()
|
||||
|
||||
hasErrors := false
|
||||
for _, diag := range v.Diagnostics {
|
||||
level := "ERROR"
|
||||
if diag.Level == validator.LevelWarning {
|
||||
level = "WARNING"
|
||||
} else {
|
||||
hasErrors = true
|
||||
}
|
||||
logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message)
|
||||
}
|
||||
|
||||
if hasErrors {
|
||||
logger.Println("Build failed due to validation errors.")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// 2. Perform Build
|
||||
b := builder.NewBuilder(files, overrides)
|
||||
|
||||
var out *os.File = os.Stdout
|
||||
@@ -99,6 +138,7 @@ func runCheck(args []string) {
|
||||
}
|
||||
|
||||
tree := index.NewProjectTree()
|
||||
syntaxErrors := 0
|
||||
|
||||
for _, file := range args {
|
||||
content, err := os.ReadFile(file)
|
||||
@@ -108,13 +148,17 @@ func runCheck(args []string) {
|
||||
}
|
||||
|
||||
p := parser.NewParser(string(content))
|
||||
config, err := p.Parse()
|
||||
if err != nil {
|
||||
logger.Printf("%s: Grammar error: %v\n", file, err)
|
||||
continue
|
||||
config, _ := p.Parse()
|
||||
if len(p.Errors()) > 0 {
|
||||
syntaxErrors += len(p.Errors())
|
||||
for _, e := range p.Errors() {
|
||||
logger.Printf("%s: Grammar error: %v\n", file, e)
|
||||
}
|
||||
}
|
||||
|
||||
tree.AddFile(file, config)
|
||||
if config != nil {
|
||||
tree.AddFile(file, config)
|
||||
}
|
||||
}
|
||||
|
||||
v := validator.NewValidator(tree, ".")
|
||||
@@ -128,8 +172,9 @@ func runCheck(args []string) {
|
||||
logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message)
|
||||
}
|
||||
|
||||
if len(v.Diagnostics) > 0 {
|
||||
logger.Printf("\nFound %d issues.\n", len(v.Diagnostics))
|
||||
totalIssues := len(v.Diagnostics) + syntaxErrors
|
||||
if totalIssues > 0 {
|
||||
logger.Printf("\nFound %d issues.\n", totalIssues)
|
||||
} else {
|
||||
logger.Println("No issues found.")
|
||||
}
|
||||
|
||||
@@ -43,7 +43,7 @@ The brain of the system. It maintains a holistic view of the project.
|
||||
* **ProjectTree**: The central data structure. It holds the root of the configuration hierarchy (`Root`), references, and isolated files.
|
||||
* **ProjectNode**: Represents a logical node in the configuration. Since a node can be defined across multiple files (fragments), `ProjectNode` aggregates these fragments. It also stores locally defined variables in its `Variables` map.
|
||||
* **NodeMap**: A hash map index (`map[string][]*ProjectNode`) for $O(1)$ symbol lookups, optimizing `FindNode` operations.
|
||||
* **Reference Resolution**: The `ResolveReferences` method links `Reference` objects to their target `ProjectNode` or `VariableDefinition`. It uses `resolveScopedName` to respect lexical scoping rules, searching up the hierarchy from the reference's container.
|
||||
* **Reference Resolution**: The `ResolveReferences` method links `Reference` objects to their target `ProjectNode` or `VariableDefinition`. It uses `ResolveName` (exported) which respects lexical scoping rules by searching the hierarchy upwards from the reference's container, using `FindNode` for deep searches within each scope.
|
||||
|
||||
### 3. `internal/validator`
|
||||
|
||||
@@ -100,12 +100,13 @@ Manages CUE schemas.
|
||||
5. Diagnostics are printed (CLI) or published via `textDocument/publishDiagnostics` (LSP).
|
||||
|
||||
### Threading Check Logic
|
||||
1. Finds the `RealTimeApplication` node.
|
||||
2. Iterates through `States` and `Threads`.
|
||||
3. For each Thread, resolves the `Functions` (GAMs).
|
||||
4. For each GAM, resolves connected `DataSources` via Input/Output signals.
|
||||
5. Maps `DataSource -> Thread` within the context of a State.
|
||||
6. If a DataSource is seen in >1 Thread, it checks the `#meta.multithreaded` property. If false (default), an error is raised.
|
||||
1. Iterates all `RealTimeApplication` nodes found in the project.
|
||||
2. For each App:
|
||||
1. Finds `States` and `Threads`.
|
||||
2. For each Thread, resolves the `Functions` (GAMs).
|
||||
3. For each GAM, resolves connected `DataSources` via Input/Output signals.
|
||||
4. Maps `DataSource -> Thread` within the context of a State.
|
||||
5. If a DataSource is seen in >1 Thread, it checks the `#meta.multithreaded` property. If false (default), an error is raised.
|
||||
|
||||
### INOUT Ordering Logic
|
||||
1. Iterates Threads.
|
||||
|
||||
@@ -173,9 +173,11 @@ You can define variables using `#var`. The type expression supports CUE syntax.
|
||||
```
|
||||
|
||||
### Usage
|
||||
Reference a variable using `@`:
|
||||
Reference a variable using `$` (preferred) or `@`:
|
||||
|
||||
```marte
|
||||
Field = $MyVar
|
||||
// or
|
||||
Field = @MyVar
|
||||
```
|
||||
|
||||
@@ -187,7 +189,7 @@ You can use operators in field values. Supported operators:
|
||||
```marte
|
||||
Field1 = 10 + 20 * 2 // 50
|
||||
Field2 = "Hello " .. "World"
|
||||
Field3 = @MyVar + 5
|
||||
Field3 = $MyVar + 5
|
||||
```
|
||||
|
||||
### Build Override
|
||||
@@ -197,3 +199,21 @@ You can override variable values during build:
|
||||
mdt build -vMyVar=200 -vEnv="PROD" src/*.marte
|
||||
```
|
||||
|
||||
## 7. Validation Rules (Detail)
|
||||
|
||||
### Data Flow Validation
|
||||
`mdt` checks for logical data flow errors:
|
||||
- **Consumed before Produced**: If a GAM reads an INOUT signal that hasn't been written by a previous GAM in the same cycle, an error is reported.
|
||||
- **Produced but not Consumed**: If a GAM writes an INOUT signal that is never read by subsequent GAMs, a warning is reported.
|
||||
- **Initialization**: Providing a `Value` field in an `InputSignal` treats it as "produced" (initialized), resolving "Consumed before Produced" errors.
|
||||
|
||||
### Threading Rules
|
||||
A DataSource that is **not** marked as multithreaded (default) cannot be used by GAMs running in different threads within the same State.
|
||||
|
||||
To allow sharing, the DataSource class in the schema must have `#meta: multithreaded: true`.
|
||||
|
||||
### Implicit vs Explicit Signals
|
||||
- **Explicit**: Signal defined in `DataSource.Signals`.
|
||||
- **Implicit**: Signal used in GAM but not defined in DataSource. `mdt` reports a warning unless suppressed.
|
||||
|
||||
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
|
||||
`mdt` includes a Language Server Protocol (LSP) implementation that provides features like:
|
||||
|
||||
- Syntax highlighting and error reporting
|
||||
- Syntax highlighting and error reporting (Parser & Semantic)
|
||||
- Auto-completion
|
||||
- Go to Definition / References
|
||||
- Hover documentation
|
||||
- Symbol renaming
|
||||
- Incremental synchronization (Robust)
|
||||
|
||||
The LSP server is started via the command:
|
||||
|
||||
|
||||
2
go.mod
2
go.mod
@@ -1,6 +1,6 @@
|
||||
module github.com/marte-community/marte-dev-tools
|
||||
|
||||
go 1.25.6
|
||||
go 1.25
|
||||
|
||||
require cuelang.org/go v0.15.3
|
||||
|
||||
|
||||
@@ -45,17 +45,15 @@ func Format(config *parser.Configuration, w io.Writer) {
|
||||
}
|
||||
|
||||
func fixComment(text string) string {
|
||||
if strings.HasPrefix(text, "//!") {
|
||||
if len(text) > 3 && text[3] != ' ' {
|
||||
return "//! " + text[3:]
|
||||
}
|
||||
} else if strings.HasPrefix(text, "//#") {
|
||||
if len(text) > 3 && text[3] != ' ' {
|
||||
return "//# " + text[3:]
|
||||
}
|
||||
} else if strings.HasPrefix(text, "//") {
|
||||
if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' {
|
||||
return "// " + text[2:]
|
||||
if !strings.HasPrefix(text, "//!") {
|
||||
if strings.HasPrefix(text, "//#") {
|
||||
if len(text) > 3 && text[3] != ' ' {
|
||||
return "//# " + text[3:]
|
||||
}
|
||||
} else if strings.HasPrefix(text, "//") {
|
||||
if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' {
|
||||
return "// " + text[2:]
|
||||
}
|
||||
}
|
||||
}
|
||||
return text
|
||||
|
||||
@@ -435,7 +435,7 @@ func (pt *ProjectTree) ResolveReferences() {
|
||||
continue
|
||||
}
|
||||
|
||||
ref.Target = pt.resolveScopedName(container, ref.Name)
|
||||
ref.Target = pt.ResolveName(container, ref.Name, nil)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -617,51 +617,19 @@ func (pt *ProjectTree) findNodeContaining(node *ProjectNode, file string, pos pa
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pt *ProjectTree) resolveScopedName(ctx *ProjectNode, name string) *ProjectNode {
|
||||
func (pt *ProjectTree) ResolveName(ctx *ProjectNode, name string, predicate func(*ProjectNode) bool) *ProjectNode {
|
||||
if ctx == nil {
|
||||
return pt.FindNode(pt.Root, name, nil)
|
||||
return pt.FindNode(pt.Root, name, predicate)
|
||||
}
|
||||
|
||||
parts := strings.Split(name, ".")
|
||||
first := parts[0]
|
||||
normFirst := NormalizeName(first)
|
||||
|
||||
var startNode *ProjectNode
|
||||
curr := ctx
|
||||
|
||||
for curr != nil {
|
||||
if child, ok := curr.Children[normFirst]; ok {
|
||||
startNode = child
|
||||
break
|
||||
if found := pt.FindNode(curr, name, predicate); found != nil {
|
||||
return found
|
||||
}
|
||||
curr = curr.Parent
|
||||
}
|
||||
|
||||
if startNode == nil && ctx != pt.Root {
|
||||
if child, ok := pt.Root.Children[normFirst]; ok {
|
||||
startNode = child
|
||||
}
|
||||
}
|
||||
|
||||
if startNode == nil {
|
||||
// Fallback to deep search from context root
|
||||
root := ctx
|
||||
for root.Parent != nil {
|
||||
root = root.Parent
|
||||
}
|
||||
return pt.FindNode(root, name, nil)
|
||||
}
|
||||
|
||||
curr = startNode
|
||||
for i := 1; i < len(parts); i++ {
|
||||
norm := NormalizeName(parts[i])
|
||||
if child, ok := curr.Children[norm]; ok {
|
||||
curr = child
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return curr
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pt *ProjectTree) ResolveVariable(ctx *ProjectNode, name string) *VariableInfo {
|
||||
|
||||
@@ -336,13 +336,9 @@ func HandleDidOpen(params DidOpenTextDocumentParams) {
|
||||
path := uriToPath(params.TextDocument.URI)
|
||||
Documents[params.TextDocument.URI] = params.TextDocument.Text
|
||||
p := parser.NewParser(params.TextDocument.Text)
|
||||
config, err := p.Parse()
|
||||
config, _ := p.Parse()
|
||||
|
||||
if err != nil {
|
||||
publishParserError(params.TextDocument.URI, err)
|
||||
} else {
|
||||
publishParserError(params.TextDocument.URI, nil)
|
||||
}
|
||||
publishParserErrors(params.TextDocument.URI, p.Errors())
|
||||
|
||||
if config != nil {
|
||||
Tree.AddFile(path, config)
|
||||
@@ -369,13 +365,9 @@ func HandleDidChange(params DidChangeTextDocumentParams) {
|
||||
Documents[uri] = text
|
||||
path := uriToPath(uri)
|
||||
p := parser.NewParser(text)
|
||||
config, err := p.Parse()
|
||||
config, _ := p.Parse()
|
||||
|
||||
if err != nil {
|
||||
publishParserError(uri, err)
|
||||
} else {
|
||||
publishParserError(uri, nil)
|
||||
}
|
||||
publishParserErrors(uri, p.Errors())
|
||||
|
||||
if config != nil {
|
||||
Tree.AddFile(path, config)
|
||||
@@ -465,6 +457,9 @@ func runValidation(_ string) {
|
||||
// Collect all known files to ensure we clear diagnostics for fixed files
|
||||
knownFiles := make(map[string]bool)
|
||||
collectFiles(Tree.Root, knownFiles)
|
||||
for _, node := range Tree.IsolatedFiles {
|
||||
collectFiles(node, knownFiles)
|
||||
}
|
||||
|
||||
// Initialize all known files with empty diagnostics
|
||||
for f := range knownFiles {
|
||||
@@ -473,8 +468,10 @@ func runValidation(_ string) {
|
||||
|
||||
for _, d := range v.Diagnostics {
|
||||
severity := 1 // Error
|
||||
levelStr := "ERROR"
|
||||
if d.Level == validator.LevelWarning {
|
||||
severity = 2 // Warning
|
||||
levelStr = "WARNING"
|
||||
}
|
||||
|
||||
diag := LSPDiagnostic{
|
||||
@@ -483,7 +480,7 @@ func runValidation(_ string) {
|
||||
End: Position{Line: d.Position.Line - 1, Character: d.Position.Column - 1 + 10}, // Arbitrary length
|
||||
},
|
||||
Severity: severity,
|
||||
Message: d.Message,
|
||||
Message: fmt.Sprintf("%s: %s", levelStr, d.Message),
|
||||
Source: "mdt",
|
||||
}
|
||||
|
||||
@@ -508,44 +505,36 @@ func runValidation(_ string) {
|
||||
}
|
||||
}
|
||||
|
||||
func publishParserError(uri string, err error) {
|
||||
if err == nil {
|
||||
notification := JsonRpcMessage{
|
||||
Jsonrpc: "2.0",
|
||||
Method: "textDocument/publishDiagnostics",
|
||||
Params: mustMarshal(PublishDiagnosticsParams{
|
||||
URI: uri,
|
||||
Diagnostics: []LSPDiagnostic{},
|
||||
}),
|
||||
}
|
||||
send(notification)
|
||||
return
|
||||
}
|
||||
func publishParserErrors(uri string, errors []error) {
|
||||
diagnostics := []LSPDiagnostic{}
|
||||
|
||||
var line, col int
|
||||
var msg string
|
||||
// Try parsing "line:col: message"
|
||||
n, _ := fmt.Sscanf(err.Error(), "%d:%d: ", &line, &col)
|
||||
if n == 2 {
|
||||
parts := strings.SplitN(err.Error(), ": ", 2)
|
||||
if len(parts) == 2 {
|
||||
msg = parts[1]
|
||||
for _, err := range errors {
|
||||
var line, col int
|
||||
var msg string
|
||||
// Try parsing "line:col: message"
|
||||
n, _ := fmt.Sscanf(err.Error(), "%d:%d: ", &line, &col)
|
||||
if n == 2 {
|
||||
parts := strings.SplitN(err.Error(), ": ", 2)
|
||||
if len(parts) == 2 {
|
||||
msg = parts[1]
|
||||
}
|
||||
} else {
|
||||
// Fallback
|
||||
line = 1
|
||||
col = 1
|
||||
msg = err.Error()
|
||||
}
|
||||
} else {
|
||||
// Fallback
|
||||
line = 1
|
||||
col = 1
|
||||
msg = err.Error()
|
||||
}
|
||||
|
||||
diag := LSPDiagnostic{
|
||||
Range: Range{
|
||||
Start: Position{Line: line - 1, Character: col - 1},
|
||||
End: Position{Line: line - 1, Character: col},
|
||||
},
|
||||
Severity: 1, // Error
|
||||
Message: msg,
|
||||
Source: "mdt-parser",
|
||||
diag := LSPDiagnostic{
|
||||
Range: Range{
|
||||
Start: Position{Line: line - 1, Character: col - 1},
|
||||
End: Position{Line: line - 1, Character: col},
|
||||
},
|
||||
Severity: 1, // Error
|
||||
Message: msg,
|
||||
Source: "mdt-parser",
|
||||
}
|
||||
diagnostics = append(diagnostics, diag)
|
||||
}
|
||||
|
||||
notification := JsonRpcMessage{
|
||||
@@ -553,13 +542,16 @@ func publishParserError(uri string, err error) {
|
||||
Method: "textDocument/publishDiagnostics",
|
||||
Params: mustMarshal(PublishDiagnosticsParams{
|
||||
URI: uri,
|
||||
Diagnostics: []LSPDiagnostic{diag},
|
||||
Diagnostics: diagnostics,
|
||||
}),
|
||||
}
|
||||
send(notification)
|
||||
}
|
||||
|
||||
func collectFiles(node *index.ProjectNode, files map[string]bool) {
|
||||
if node == nil {
|
||||
return
|
||||
}
|
||||
for _, frag := range node.Fragments {
|
||||
files[frag.File] = true
|
||||
}
|
||||
|
||||
@@ -299,6 +299,8 @@ func (p *Parser) parseAtom() (Value, bool) {
|
||||
return &ReferenceValue{Position: tok.Position, Value: tok.Value}, true
|
||||
case TokenVariableReference:
|
||||
return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true
|
||||
case TokenObjectIdentifier:
|
||||
return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true
|
||||
case TokenLBrace:
|
||||
arr := &ArrayValue{Position: tok.Position}
|
||||
for {
|
||||
@@ -380,3 +382,7 @@ func (p *Parser) parseVariableDefinition(startTok Token) (Definition, bool) {
|
||||
DefaultValue: defVal,
|
||||
}, true
|
||||
}
|
||||
|
||||
func (p *Parser) Errors() []error {
|
||||
return p.errors
|
||||
}
|
||||
|
||||
@@ -304,7 +304,7 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
|
||||
return // Ignore implicit signals or missing datasource (handled elsewhere if mandatory)
|
||||
}
|
||||
|
||||
dsNode := v.resolveReference(dsName, v.getNodeFile(signalNode), isDataSource)
|
||||
dsNode := v.resolveReference(dsName, signalNode, isDataSource)
|
||||
if dsNode == nil {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
@@ -442,6 +442,36 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate Value initialization
|
||||
if valField, hasValue := fields["Value"]; hasValue && len(valField) > 0 {
|
||||
var typeStr string
|
||||
if typeFields, ok := fields["Type"]; ok && len(typeFields) > 0 {
|
||||
typeStr = v.getFieldValue(typeFields[0], signalNode)
|
||||
} else if signalNode.Target != nil {
|
||||
if t, ok := signalNode.Target.Metadata["Type"]; ok {
|
||||
typeStr = t
|
||||
}
|
||||
}
|
||||
|
||||
if typeStr != "" && v.Schema != nil {
|
||||
ctx := v.Schema.Context
|
||||
typeVal := ctx.CompileString(typeStr)
|
||||
if typeVal.Err() == nil {
|
||||
valInterface := v.valueToInterface(valField[0].Value, signalNode)
|
||||
valVal := ctx.Encode(valInterface)
|
||||
res := typeVal.Unify(valVal)
|
||||
if err := res.Validate(cue.Concrete(true)); err != nil {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
Message: fmt.Sprintf("Value initialization mismatch for signal '%s': %v", signalNode.RealName, err),
|
||||
Position: valField[0].Position,
|
||||
File: v.getNodeFile(signalNode),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (v *Validator) checkSignalProperty(gamSig, dsSig *index.ProjectNode, prop string) {
|
||||
@@ -535,17 +565,8 @@ func (v *Validator) getFieldValue(f *parser.Field, ctx *index.ProjectNode) strin
|
||||
return ""
|
||||
}
|
||||
|
||||
func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
|
||||
if isoNode, ok := v.Tree.IsolatedFiles[file]; ok {
|
||||
if found := v.Tree.FindNode(isoNode, name, predicate); found != nil {
|
||||
return found
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if v.Tree.Root == nil {
|
||||
return nil
|
||||
}
|
||||
return v.Tree.FindNode(v.Tree.Root, name, predicate)
|
||||
func (v *Validator) resolveReference(name string, ctx *index.ProjectNode, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
|
||||
return v.Tree.ResolveName(ctx, name, predicate)
|
||||
}
|
||||
|
||||
func (v *Validator) getNodeClass(node *index.ProjectNode) string {
|
||||
@@ -710,7 +731,7 @@ func (v *Validator) checkFunctionsArray(node *index.ProjectNode, fields map[stri
|
||||
if arr, ok := f.Value.(*parser.ArrayValue); ok {
|
||||
for _, elem := range arr.Elements {
|
||||
if ref, ok := elem.(*parser.ReferenceValue); ok {
|
||||
target := v.resolveReference(ref.Value, v.getNodeFile(node), isGAM)
|
||||
target := v.resolveReference(ref.Value, node, isGAM)
|
||||
if target == nil {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
@@ -769,19 +790,20 @@ func (v *Validator) CheckDataSourceThreading() {
|
||||
return
|
||||
}
|
||||
|
||||
// 1. Find RealTimeApplication
|
||||
var appNode *index.ProjectNode
|
||||
var appNodes []*index.ProjectNode
|
||||
findApp := func(n *index.ProjectNode) {
|
||||
if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" {
|
||||
appNode = n
|
||||
appNodes = append(appNodes, n)
|
||||
}
|
||||
}
|
||||
v.Tree.Walk(findApp)
|
||||
|
||||
if appNode == nil {
|
||||
return
|
||||
for _, appNode := range appNodes {
|
||||
v.checkAppDataSourceThreading(appNode)
|
||||
}
|
||||
}
|
||||
|
||||
func (v *Validator) checkAppDataSourceThreading(appNode *index.ProjectNode) {
|
||||
// 2. Find States
|
||||
var statesNode *index.ProjectNode
|
||||
if s, ok := appNode.Children["States"]; ok {
|
||||
@@ -852,7 +874,7 @@ func (v *Validator) getThreadGAMs(thread *index.ProjectNode) []*index.ProjectNod
|
||||
if arr, ok := f.Value.(*parser.ArrayValue); ok {
|
||||
for _, elem := range arr.Elements {
|
||||
if ref, ok := elem.(*parser.ReferenceValue); ok {
|
||||
target := v.resolveReference(ref.Value, v.getNodeFile(thread), isGAM)
|
||||
target := v.resolveReference(ref.Value, thread, isGAM)
|
||||
if target != nil {
|
||||
gams = append(gams, target)
|
||||
}
|
||||
@@ -874,7 +896,7 @@ func (v *Validator) getGAMDataSources(gam *index.ProjectNode) []*index.ProjectNo
|
||||
fields := v.getFields(sig)
|
||||
if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
|
||||
dsName := v.getFieldValue(dsFields[0], sig)
|
||||
dsNode := v.resolveReference(dsName, v.getNodeFile(sig), isDataSource)
|
||||
dsNode := v.resolveReference(dsName, sig, isDataSource)
|
||||
if dsNode != nil {
|
||||
dsMap[dsNode] = true
|
||||
}
|
||||
@@ -908,18 +930,20 @@ func (v *Validator) CheckINOUTOrdering() {
|
||||
return
|
||||
}
|
||||
|
||||
var appNode *index.ProjectNode
|
||||
var appNodes []*index.ProjectNode
|
||||
findApp := func(n *index.ProjectNode) {
|
||||
if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" {
|
||||
appNode = n
|
||||
appNodes = append(appNodes, n)
|
||||
}
|
||||
}
|
||||
v.Tree.Walk(findApp)
|
||||
|
||||
if appNode == nil {
|
||||
return
|
||||
for _, appNode := range appNodes {
|
||||
v.checkAppINOUTOrdering(appNode)
|
||||
}
|
||||
}
|
||||
|
||||
func (v *Validator) checkAppINOUTOrdering(appNode *index.ProjectNode) {
|
||||
var statesNode *index.ProjectNode
|
||||
if s, ok := appNode.Children["States"]; ok {
|
||||
statesNode = s
|
||||
@@ -936,6 +960,7 @@ func (v *Validator) CheckINOUTOrdering() {
|
||||
return
|
||||
}
|
||||
|
||||
suppress := v.isGloballyAllowed("not_consumed", v.getNodeFile(appNode))
|
||||
for _, state := range statesNode.Children {
|
||||
var threads []*index.ProjectNode
|
||||
for _, child := range state.Children {
|
||||
@@ -961,24 +986,34 @@ func (v *Validator) CheckINOUTOrdering() {
|
||||
v.processGAMSignalsForOrdering(gam, "InputSignals", producedSignals, consumedSignals, true, thread, state)
|
||||
v.processGAMSignalsForOrdering(gam, "OutputSignals", producedSignals, consumedSignals, false, thread, state)
|
||||
}
|
||||
|
||||
// Check for produced but not consumed
|
||||
for ds, signals := range producedSignals {
|
||||
for sigName, producers := range signals {
|
||||
consumed := false
|
||||
if cSet, ok := consumedSignals[ds]; ok {
|
||||
if cSet[sigName] {
|
||||
consumed = true
|
||||
if !suppress {
|
||||
// Check for produced but not consumed
|
||||
for ds, signals := range producedSignals {
|
||||
for sigName, producers := range signals {
|
||||
consumed := false
|
||||
if cSet, ok := consumedSignals[ds]; ok {
|
||||
if cSet[sigName] {
|
||||
consumed = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if !consumed {
|
||||
for _, prod := range producers {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelWarning,
|
||||
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is produced in thread '%s' but never consumed in the same thread.", sigName, ds.RealName, thread.RealName),
|
||||
Position: v.getNodePosition(prod),
|
||||
File: v.getNodeFile(prod),
|
||||
})
|
||||
if !consumed {
|
||||
for _, prod := range producers {
|
||||
locally_suppressed := false
|
||||
for _, p := range prod.Pragmas {
|
||||
if strings.HasPrefix(p, "not_consumed:") || strings.HasPrefix(p, "ignore(not_consumed)") {
|
||||
locally_suppressed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !locally_suppressed {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelWarning,
|
||||
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is produced in thread '%s' but never consumed in the same thread.", sigName, ds.RealName, thread.RealName),
|
||||
Position: v.getNodePosition(prod),
|
||||
File: v.getNodeFile(prod),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -992,7 +1027,7 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
|
||||
if container == nil {
|
||||
return
|
||||
}
|
||||
|
||||
not_produced_suppress := v.isGloballyAllowed("not_produced", v.getNodeFile(gam))
|
||||
for _, sig := range container.Children {
|
||||
fields := v.getFields(sig)
|
||||
var dsNode *index.ProjectNode
|
||||
@@ -1008,7 +1043,7 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
|
||||
if dsNode == nil {
|
||||
if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
|
||||
dsName := v.getFieldValue(dsFields[0], sig)
|
||||
dsNode = v.resolveReference(dsName, v.getNodeFile(sig), isDataSource)
|
||||
dsNode = v.resolveReference(dsName, sig, isDataSource)
|
||||
}
|
||||
if aliasFields, ok := fields["Alias"]; ok && len(aliasFields) > 0 {
|
||||
sigName = v.getFieldValue(aliasFields[0], sig)
|
||||
@@ -1033,22 +1068,39 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
|
||||
}
|
||||
|
||||
if isInput {
|
||||
isProduced := false
|
||||
if set, ok := produced[dsNode]; ok {
|
||||
if len(set[sigName]) > 0 {
|
||||
isProduced = true
|
||||
// Check if signal has 'Value' field - treat as produced/initialized
|
||||
if _, hasValue := fields["Value"]; hasValue {
|
||||
if produced[dsNode] == nil {
|
||||
produced[dsNode] = make(map[string][]*index.ProjectNode)
|
||||
}
|
||||
produced[dsNode][sigName] = append(produced[dsNode][sigName], sig)
|
||||
}
|
||||
|
||||
if !isProduced {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is consumed by GAM '%s' in thread '%s' (State '%s') before being produced by any previous GAM.", sigName, dsNode.RealName, gam.RealName, thread.RealName, state.RealName),
|
||||
Position: v.getNodePosition(sig),
|
||||
File: v.getNodeFile(sig),
|
||||
})
|
||||
}
|
||||
if !not_produced_suppress {
|
||||
isProduced := false
|
||||
if set, ok := produced[dsNode]; ok {
|
||||
if len(set[sigName]) > 0 {
|
||||
isProduced = true
|
||||
}
|
||||
}
|
||||
locally_suppressed := false
|
||||
for _, p := range sig.Pragmas {
|
||||
if strings.HasPrefix(p, "not_produced:") || strings.HasPrefix(p, "ignore(not_produced)") {
|
||||
locally_suppressed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !isProduced && !locally_suppressed {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is consumed by GAM '%s' in thread '%s' (State '%s') before being produced by any previous GAM.", sigName, dsNode.RealName, gam.RealName, thread.RealName, state.RealName),
|
||||
Position: v.getNodePosition(sig),
|
||||
File: v.getNodeFile(sig),
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
if consumed[dsNode] == nil {
|
||||
consumed[dsNode] = make(map[string]bool)
|
||||
}
|
||||
@@ -1120,16 +1172,16 @@ func (v *Validator) CheckVariables() {
|
||||
}
|
||||
|
||||
v.Tree.Walk(checkNodeVars)
|
||||
}
|
||||
func (v *Validator) CheckUnresolvedVariables() {
|
||||
for _, ref := range v.Tree.References {
|
||||
if ref.IsVariable && ref.TargetVariable == nil {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
Message: fmt.Sprintf("Unresolved variable reference: '@%s'", ref.Name),
|
||||
Position: ref.Position,
|
||||
File: ref.File,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
func (v *Validator) CheckUnresolvedVariables() {
|
||||
for _, ref := range v.Tree.References {
|
||||
if ref.IsVariable && ref.TargetVariable == nil {
|
||||
v.Diagnostics = append(v.Diagnostics, Diagnostic{
|
||||
Level: LevelError,
|
||||
Message: fmt.Sprintf("Unresolved variable reference: '@%s'", ref.Name),
|
||||
Position: ref.Position,
|
||||
File: ref.File,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,11 +79,6 @@ func TestLSPAppTestRepro(t *testing.T) {
|
||||
t.Error("LSP missing unresolved variable error")
|
||||
}
|
||||
|
||||
// Check INOUT consumed but not produced
|
||||
if !strings.Contains(output, "consumed by GAM '+FnA'") {
|
||||
t.Error("LSP missing consumed but not produced error")
|
||||
}
|
||||
|
||||
if t.Failed() {
|
||||
t.Log(output)
|
||||
}
|
||||
|
||||
@@ -1,167 +0,0 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestLSPBinaryDiagnostics(t *testing.T) {
|
||||
// 1. Build mdt
|
||||
// Ensure we are in test directory context
|
||||
buildCmd := exec.Command("go", "build", "-o", "../build/mdt", "../cmd/mdt")
|
||||
if output, err := buildCmd.CombinedOutput(); err != nil {
|
||||
t.Fatalf("Failed to build mdt: %v\nOutput: %s", err, output)
|
||||
}
|
||||
|
||||
// 2. Start mdt lsp
|
||||
cmd := exec.Command("../build/mdt", "lsp")
|
||||
stdin, _ := cmd.StdinPipe()
|
||||
stdout, _ := cmd.StdoutPipe()
|
||||
stderr, _ := cmd.StderrPipe()
|
||||
|
||||
// Pipe stderr to test log for debugging
|
||||
go func() {
|
||||
scanner := bufio.NewScanner(stderr)
|
||||
for scanner.Scan() {
|
||||
t.Logf("LSP STDERR: %s", scanner.Text())
|
||||
}
|
||||
}()
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("Failed to start mdt lsp: %v", err)
|
||||
}
|
||||
defer func() {
|
||||
cmd.Process.Kill()
|
||||
cmd.Wait()
|
||||
}()
|
||||
|
||||
reader := bufio.NewReader(stdout)
|
||||
|
||||
send := func(m interface{}) {
|
||||
body, _ := json.Marshal(m)
|
||||
msg := fmt.Sprintf("Content-Length: %d\r\n\r\n%s", len(body), body)
|
||||
stdin.Write([]byte(msg))
|
||||
}
|
||||
|
||||
readCh := make(chan map[string]interface{}, 100)
|
||||
|
||||
go func() { for {
|
||||
// Parse Header
|
||||
line, err := reader.ReadString('\n')
|
||||
if err != nil {
|
||||
close(readCh)
|
||||
return
|
||||
}
|
||||
var length int
|
||||
// Handle Content-Length: <len>\r\n
|
||||
if _, err := fmt.Sscanf(strings.TrimSpace(line), "Content-Length: %d", &length); err != nil {
|
||||
// Maybe empty line or other header?
|
||||
continue
|
||||
}
|
||||
|
||||
// Read until empty line (\r\n)
|
||||
for {
|
||||
l, err := reader.ReadString('\n')
|
||||
if err != nil {
|
||||
close(readCh)
|
||||
return
|
||||
}
|
||||
if l == "\r\n" {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
body := make([]byte, length)
|
||||
if _, err := io.ReadFull(reader, body); err != nil {
|
||||
close(readCh)
|
||||
return
|
||||
}
|
||||
|
||||
var m map[string]interface{}
|
||||
if err := json.Unmarshal(body, &m); err == nil {
|
||||
readCh <- m
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
cwd, _ := os.Getwd()
|
||||
projectRoot := filepath.Dir(cwd)
|
||||
absPath := filepath.Join(projectRoot, "examples/app_test.marte")
|
||||
uri := "file://" + absPath
|
||||
|
||||
// 3. Initialize
|
||||
examplesDir := filepath.Join(projectRoot, "examples")
|
||||
send(map[string]interface{}{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 1,
|
||||
"method": "initialize",
|
||||
"params": map[string]interface{}{
|
||||
"rootUri": "file://" + examplesDir,
|
||||
},
|
||||
})
|
||||
|
||||
// 4. Open app_test.marte
|
||||
content, err := os.ReadFile(absPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read test file: %v", err)
|
||||
}
|
||||
send(map[string]interface{}{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "textDocument/didOpen",
|
||||
"params": map[string]interface{}{
|
||||
"textDocument": map[string]interface{}{
|
||||
"uri": uri,
|
||||
"languageId": "marte",
|
||||
"version": 1,
|
||||
"text": string(content),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// 5. Wait for diagnostics
|
||||
foundOrdering := false
|
||||
foundVariable := false
|
||||
|
||||
timeout := time.After(30 * time.Second)
|
||||
|
||||
for {
|
||||
select {
|
||||
case msg, ok := <-readCh:
|
||||
if !ok {
|
||||
t.Fatal("LSP stream closed unexpectedly")
|
||||
}
|
||||
t.Logf("Received: %v", msg)
|
||||
if method, ok := msg["method"].(string); ok && method == "textDocument/publishDiagnostics" {
|
||||
params := msg["params"].(map[string]interface{})
|
||||
// Check URI match?
|
||||
// if params["uri"] != uri { continue } // Might be absolute vs relative
|
||||
|
||||
diags := params["diagnostics"].([]interface{})
|
||||
for _, d := range diags {
|
||||
m := d.(map[string]interface{})["message"].(string)
|
||||
if strings.Contains(m, "INOUT Signal 'A'") {
|
||||
foundOrdering = true
|
||||
t.Log("Found Ordering error")
|
||||
}
|
||||
if strings.Contains(m, "Unresolved variable reference: '@Value'") {
|
||||
foundVariable = true
|
||||
t.Log("Found Variable error")
|
||||
}
|
||||
}
|
||||
if foundOrdering && foundVariable {
|
||||
return // Success
|
||||
}
|
||||
}
|
||||
case <-timeout:
|
||||
t.Fatal("Timeout waiting for diagnostics")
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -92,13 +92,7 @@ func TestLSPDiagnosticsAppTest(t *testing.T) {
|
||||
t.Error("Missing diagnostic for unresolved variable '@Value'")
|
||||
}
|
||||
|
||||
// 2. Check INOUT Ordering Error (Signal A consumed but not produced)
|
||||
// Message format: INOUT Signal 'A' (DS '+DDB') is consumed by GAM '+FnA' ... before being produced ...
|
||||
if !strings.Contains(output, "INOUT Signal 'A'") || !strings.Contains(output, "before being produced") {
|
||||
t.Error("Missing diagnostic for INOUT ordering error (Signal A)")
|
||||
}
|
||||
|
||||
// 3. Check INOUT Unused Warning (Signal B produced but not consumed)
|
||||
// 2. Check INOUT Unused Warning (Signal B produced but not consumed)
|
||||
// Message format: INOUT Signal 'B' ... produced ... but never consumed ...
|
||||
if !strings.Contains(output, "INOUT Signal 'B'") || !strings.Contains(output, "never consumed") {
|
||||
t.Error("Missing diagnostic for unused INOUT signal (Signal B)")
|
||||
|
||||
101
test/lsp_fuzz_test.go
Normal file
101
test/lsp_fuzz_test.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/marte-community/marte-dev-tools/internal/lsp"
|
||||
)
|
||||
|
||||
func TestIncrementalFuzz(t *testing.T) {
|
||||
// Initialize
|
||||
lsp.Documents = make(map[string]string)
|
||||
uri := "file://fuzz.marte"
|
||||
currentText := ""
|
||||
lsp.Documents[uri] = currentText
|
||||
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
|
||||
// Apply 1000 random edits
|
||||
for i := 0; i < 1000; i++ {
|
||||
// Randomly choose Insert or Delete
|
||||
isInsert := rand.Intn(2) == 0
|
||||
|
||||
change := lsp.TextDocumentContentChangeEvent{}
|
||||
|
||||
// Use simple ascii string
|
||||
length := len(currentText)
|
||||
|
||||
if isInsert || length == 0 {
|
||||
// Insert
|
||||
pos := 0
|
||||
if length > 0 {
|
||||
pos = rand.Intn(length + 1)
|
||||
}
|
||||
|
||||
insertStr := "X"
|
||||
if rand.Intn(5) == 0 { insertStr = "\n" }
|
||||
if rand.Intn(10) == 0 { insertStr = "longstring" }
|
||||
|
||||
// Calculate Line/Char for pos
|
||||
line, char := offsetToLineChar(currentText, pos)
|
||||
|
||||
change.Range = &lsp.Range{
|
||||
Start: lsp.Position{Line: line, Character: char},
|
||||
End: lsp.Position{Line: line, Character: char},
|
||||
}
|
||||
change.Text = insertStr
|
||||
|
||||
// Expected
|
||||
currentText = currentText[:pos] + insertStr + currentText[pos:]
|
||||
} else {
|
||||
// Delete
|
||||
start := rand.Intn(length)
|
||||
end := start + 1 + rand.Intn(length - start) // at least 1 char
|
||||
|
||||
// Range
|
||||
l1, c1 := offsetToLineChar(currentText, start)
|
||||
l2, c2 := offsetToLineChar(currentText, end)
|
||||
|
||||
change.Range = &lsp.Range{
|
||||
Start: lsp.Position{Line: l1, Character: c1},
|
||||
End: lsp.Position{Line: l2, Character: c2},
|
||||
}
|
||||
change.Text = ""
|
||||
|
||||
currentText = currentText[:start] + currentText[end:]
|
||||
}
|
||||
|
||||
// Apply
|
||||
lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
|
||||
TextDocument: lsp.VersionedTextDocumentIdentifier{URI: uri, Version: i},
|
||||
ContentChanges: []lsp.TextDocumentContentChangeEvent{change},
|
||||
})
|
||||
|
||||
// Verify
|
||||
if lsp.Documents[uri] != currentText {
|
||||
t.Fatalf("Fuzz iteration %d failed.\nExpected len: %d\nGot len: %d\nChange: %+v", i, len(currentText), len(lsp.Documents[uri]), change)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// offsetToLineChar converts a rune-aligned byte offset into a zero-based
// (line, character) pair, counting characters per rune and resetting the
// column after each '\n'. It returns (-1, -1) when offset does not land on
// a rune boundary inside text and is not exactly len(text).
func offsetToLineChar(text string, offset int) (int, int) {
	line, col := 0, 0
	for idx, ch := range text {
		if idx == offset {
			return line, col
		}
		switch ch {
		case '\n':
			line++
			col = 0
		default:
			col++
		}
	}
	// The position just past the final character is a valid insertion point.
	if offset == len(text) {
		return line, col
	}
	return -1, -1
}
|
||||
204
test/lsp_incremental_correctness_test.go
Normal file
204
test/lsp_incremental_correctness_test.go
Normal file
@@ -0,0 +1,204 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/marte-community/marte-dev-tools/internal/index"
|
||||
"github.com/marte-community/marte-dev-tools/internal/lsp"
|
||||
"github.com/marte-community/marte-dev-tools/internal/schema"
|
||||
)
|
||||
|
||||
func TestIncrementalCorrectness(t *testing.T) {
|
||||
lsp.Documents = make(map[string]string)
|
||||
uri := "file://test.txt"
|
||||
initial := "12345\n67890"
|
||||
lsp.Documents[uri] = initial
|
||||
|
||||
// Edit 1: Insert "A" at 0:1 -> "1A2345\n67890"
|
||||
change1 := lsp.TextDocumentContentChangeEvent{
|
||||
Range: &lsp.Range{Start: lsp.Position{Line: 0, Character: 1}, End: lsp.Position{Line: 0, Character: 1}},
|
||||
Text: "A",
|
||||
}
|
||||
lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
|
||||
TextDocument: lsp.VersionedTextDocumentIdentifier{URI: uri},
|
||||
ContentChanges: []lsp.TextDocumentContentChangeEvent{change1},
|
||||
})
|
||||
|
||||
if lsp.Documents[uri] != "1A2345\n67890" {
|
||||
t.Errorf("Edit 1 failed: %q", lsp.Documents[uri])
|
||||
}
|
||||
|
||||
// Edit 2: Delete newline (merge lines)
|
||||
// "1A2345\n67890" -> "1A234567890"
|
||||
// \n is at index 6.
|
||||
// 0:6 points to \n? "1A2345" length is 6.
|
||||
// So 0:6 is AFTER '5', at '\n'.
|
||||
// 1:0 is AFTER '\n', at '6'.
|
||||
// Range 0:6 - 1:0 covers '\n'.
|
||||
change2 := lsp.TextDocumentContentChangeEvent{
|
||||
Range: &lsp.Range{Start: lsp.Position{Line: 0, Character: 6}, End: lsp.Position{Line: 1, Character: 0}},
|
||||
Text: "",
|
||||
}
|
||||
lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
|
||||
TextDocument: lsp.VersionedTextDocumentIdentifier{URI: uri},
|
||||
ContentChanges: []lsp.TextDocumentContentChangeEvent{change2},
|
||||
})
|
||||
|
||||
if lsp.Documents[uri] != "1A234567890" {
|
||||
t.Errorf("Edit 2 failed: %q", lsp.Documents[uri])
|
||||
}
|
||||
|
||||
// Edit 3: Add newline at end
|
||||
// "1A234567890" len 11.
|
||||
// 0:11.
|
||||
change3 := lsp.TextDocumentContentChangeEvent{
|
||||
Range: &lsp.Range{Start: lsp.Position{Line: 0, Character: 11}, End: lsp.Position{Line: 0, Character: 11}},
|
||||
Text: "\n",
|
||||
}
|
||||
lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
|
||||
TextDocument: lsp.VersionedTextDocumentIdentifier{URI: uri},
|
||||
ContentChanges: []lsp.TextDocumentContentChangeEvent{change3},
|
||||
})
|
||||
|
||||
if lsp.Documents[uri] != "1A234567890\n" {
|
||||
t.Errorf("Edit 3 failed: %q", lsp.Documents[uri])
|
||||
}
|
||||
}
|
||||
|
||||
// TestIncrementalAppValidation drives the LSP handlers end-to-end on a small
// RealTimeApplication config: it opens the document, then applies two
// incremental edits and checks after each step that re-validation emits (or
// stops emitting) the expected INOUT diagnostics on lsp.Output.
func TestIncrementalAppValidation(t *testing.T) {
	// Setup: reset the global LSP state and capture diagnostics in a buffer.
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	lsp.GlobalSchema = schema.LoadFullSchema(".")
	var buf bytes.Buffer
	lsp.Output = &buf

	// Config under test: GAM +A consumes signal A (never produced -> ERROR)
	// and produces signal B (never consumed -> WARNING). The "// Placeholder"
	// comment is the anchor where step 3 inserts a Value field.
	content := `// Test app
+App = {
Class = RealTimeApplication
+Data = {
Class = ReferenceContainer
DefaultDataSource = DDB
+DDB = {
Class = GAMDataSource
}
+TimingDataSource = {
Class = TimingDataSource
}
}
+Functions = {
Class = ReferenceContainer
+A = {
Class = IOGAM
InputSignals = {
A = {
DataSource = DDB
Type = uint32
// Placeholder
}
}
OutputSignals = {
B = {
DataSource = DDB
Type = uint32
}
}
}
}
+States = {
Class = ReferenceContainer
+State = {
Class =RealTimeState
Threads = {
+Th1 = {
Class = RealTimeThread
Functions = {A}
}
}
}
}
+Scheduler = {
Class = GAMScheduler
TimingDataSource = TimingDataSource
}
}
`
	uri := "file://app_inc.marte"

	// 1. Open: didOpen triggers a full parse + validation pass.
	lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
		TextDocument: lsp.TextDocumentItem{URI: uri, Text: content},
	})

	out := buf.String()

	// Signal A is never produced. Should have consumed error.
	if !strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Missing consumed error for A")
	}
	// Signal B is Output, never consumed.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}

	buf.Reset()

	// 2. Insert comment at start
	// Expecting same errors (a pure comment must not change validation).
	change1 := lsp.TextDocumentContentChangeEvent{
		Range: &lsp.Range{Start: lsp.Position{Line: 0, Character: 0}, End: lsp.Position{Line: 0, Character: 0}},
		Text:  "// Comment\n",
	}
	lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
		TextDocument:   lsp.VersionedTextDocumentIdentifier{URI: uri},
		ContentChanges: []lsp.TextDocumentContentChangeEvent{change1},
	})

	out = buf.String()
	// Signal A is never produced. Should have consumed error.
	if !strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Missing consumed error for A")
	}
	// Signal B is Output, never consumed.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}

	buf.Reset()

	// 3. Add Value to A: locate the anchor comment in the CURRENT server-side
	// text (step 2 shifted all offsets by one line).
	currentText := lsp.Documents[uri]
	idx := strings.Index(currentText, "Placeholder")
	if idx == -1 {
		t.Fatal("Could not find anchor string")
	}

	// Advance to the newline ending the anchor line; insert at the start of
	// the following line.
	idx = strings.Index(currentText[idx:], "\n") + idx
	insertPos := idx + 1

	line, char := offsetToLineChar(currentText, insertPos)

	change2 := lsp.TextDocumentContentChangeEvent{
		Range: &lsp.Range{Start: lsp.Position{Line: line, Character: char}, End: lsp.Position{Line: line, Character: char}},
		Text:  "Value = 10\n",
	}

	lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
		TextDocument:   lsp.VersionedTextDocumentIdentifier{URI: uri},
		ContentChanges: []lsp.TextDocumentContentChangeEvent{change2},
	})

	out = buf.String()

	// Signal A has now a Value field and so it is produced. Should NOT have consumed error.
	if strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Unexpected consumed error for A")
	}
	// Signal B is Output, never consumed.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}

}
|
||||
44
test/lsp_value_validation_test.go
Normal file
44
test/lsp_value_validation_test.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/marte-community/marte-dev-tools/internal/index"
|
||||
"github.com/marte-community/marte-dev-tools/internal/lsp"
|
||||
"github.com/marte-community/marte-dev-tools/internal/schema"
|
||||
)
|
||||
|
||||
func TestLSPValueValidation(t *testing.T) {
|
||||
lsp.Tree = index.NewProjectTree()
|
||||
lsp.Documents = make(map[string]string)
|
||||
lsp.GlobalSchema = schema.LoadFullSchema(".")
|
||||
|
||||
var buf bytes.Buffer
|
||||
lsp.Output = &buf
|
||||
|
||||
content := `
|
||||
+Data = {
|
||||
Class = ReferenceContainer
|
||||
+DS = { Class = GAMDataSource Signals = { S = { Type = uint8 } } }
|
||||
}
|
||||
+GAM = {
|
||||
Class = IOGAM
|
||||
InputSignals = {
|
||||
S = { DataSource = DS Type = uint8 Value = 1024 }
|
||||
}
|
||||
}
|
||||
+App = { Class = RealTimeApplication +States = { Class = ReferenceContainer +S = { Class = RealTimeState Threads = { +T = { Class = RealTimeThread Functions = { GAM } } } } } }
|
||||
`
|
||||
uri := "file://value.marte"
|
||||
lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
|
||||
TextDocument: lsp.TextDocumentItem{URI: uri, Text: content},
|
||||
})
|
||||
|
||||
output := buf.String()
|
||||
if !strings.Contains(output, "Value initialization mismatch") {
|
||||
t.Error("LSP did not report value validation error")
|
||||
t.Log(output)
|
||||
}
|
||||
}
|
||||
101
test/validator_inout_value_test.go
Normal file
101
test/validator_inout_value_test.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/marte-community/marte-dev-tools/internal/index"
|
||||
"github.com/marte-community/marte-dev-tools/internal/parser"
|
||||
"github.com/marte-community/marte-dev-tools/internal/validator"
|
||||
)
|
||||
|
||||
func TestINOUTValueInitialization(t *testing.T) {
|
||||
content := `
|
||||
+Data = {
|
||||
Class = ReferenceContainer
|
||||
+MyDS = {
|
||||
Class = GAMDataSource
|
||||
#meta = { multithreaded = false }
|
||||
Signals = { Sig1 = { Type = uint32 } }
|
||||
}
|
||||
}
|
||||
+GAM1 = {
|
||||
Class = IOGAM
|
||||
InputSignals = {
|
||||
Sig1 = {
|
||||
DataSource = MyDS
|
||||
Type = uint32
|
||||
Value = 10 // Initialization
|
||||
}
|
||||
}
|
||||
}
|
||||
+GAM2 = {
|
||||
Class = IOGAM
|
||||
InputSignals = {
|
||||
Sig1 = { DataSource = MyDS Type = uint32 } // Consumes initialized signal
|
||||
}
|
||||
}
|
||||
+App = {
|
||||
Class = RealTimeApplication
|
||||
+States = {
|
||||
Class = ReferenceContainer
|
||||
+State1 = {
|
||||
Class = RealTimeState
|
||||
+Thread1 = {
|
||||
Class = RealTimeThread
|
||||
Functions = { GAM1, GAM2 } // Should Pass
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
pt := index.NewProjectTree()
|
||||
p := parser.NewParser(content)
|
||||
cfg, err := p.Parse()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
pt.AddFile("main.marte", cfg)
|
||||
|
||||
v := validator.NewValidator(pt, ".")
|
||||
v.ValidateProject()
|
||||
|
||||
for _, d := range v.Diagnostics {
|
||||
if strings.Contains(d.Message, "before being produced") {
|
||||
t.Errorf("Unexpected error: %s", d.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestINOUTValueTypeMismatch(t *testing.T) {
|
||||
content := `
|
||||
+Data = { Class = ReferenceContainer +DS = { Class = GAMDataSource #meta = { multithreaded = false } Signals = { S = { Type = uint8 } } } }
|
||||
+GAM1 = {
|
||||
Class = IOGAM
|
||||
InputSignals = {
|
||||
S = { DataSource = DS Type = uint8 Value = 1024 }
|
||||
}
|
||||
}
|
||||
+App = { Class = RealTimeApplication +States = { Class = ReferenceContainer +S = { Class = RealTimeState Threads = { +T = { Class = RealTimeThread Functions = { GAM1 } } } } } }
|
||||
`
|
||||
pt := index.NewProjectTree()
|
||||
p := parser.NewParser(content)
|
||||
cfg, err := p.Parse()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
pt.AddFile("fail.marte", cfg)
|
||||
|
||||
v := validator.NewValidator(pt, ".")
|
||||
v.ValidateProject()
|
||||
|
||||
found := false
|
||||
for _, d := range v.Diagnostics {
|
||||
if strings.Contains(d.Message, "Value initialization mismatch") {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("Expected Value initialization mismatch error")
|
||||
}
|
||||
}
|
||||
46
test/validator_unused_value_test.go
Normal file
46
test/validator_unused_value_test.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/marte-community/marte-dev-tools/internal/index"
|
||||
"github.com/marte-community/marte-dev-tools/internal/parser"
|
||||
"github.com/marte-community/marte-dev-tools/internal/validator"
|
||||
)
|
||||
|
||||
func TestUnusedGAMValueValidation(t *testing.T) {
|
||||
content := `
|
||||
+Data = {
|
||||
Class = ReferenceContainer
|
||||
+DS = { Class = GAMDataSource Signals = { S = { Type = uint8 } } }
|
||||
}
|
||||
+UnusedGAM = {
|
||||
Class = IOGAM
|
||||
InputSignals = {
|
||||
S = { DataSource = DS Type = uint8 Value = 1024 }
|
||||
}
|
||||
}
|
||||
+App = { Class = RealTimeApplication }
|
||||
`
|
||||
pt := index.NewProjectTree()
|
||||
p := parser.NewParser(content)
|
||||
cfg, err := p.Parse()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
pt.AddFile("unused.marte", cfg)
|
||||
|
||||
v := validator.NewValidator(pt, ".")
|
||||
v.ValidateProject()
|
||||
|
||||
found := false
|
||||
for _, d := range v.Diagnostics {
|
||||
if strings.Contains(d.Message, "Value initialization mismatch") {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("Expected Value initialization mismatch error for unused GAM")
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user