Compare commits

...

17 Commits

Author SHA1 Message Date
Martino Ferrari
7ae701e8c1 Auto doc... 2026-02-02 18:22:52 +01:00
Martino Ferrari
23ddbc0e91 Implemented inlay hints 2026-02-02 18:18:50 +01:00
Martino Ferrari
ee9235c24d Improved doc 2026-02-02 17:35:26 +01:00
Martino Ferrari
749eab0a32 Better formatting and expression handling 2026-02-02 17:22:39 +01:00
Martino Ferrari
12615aa6d2 better expression handling in lsp 2026-02-02 16:09:50 +01:00
Martino Ferrari
bd845aa859 Added hover with expression and improved implicit signal referencing and validation 2026-02-02 16:06:24 +01:00
Martino Ferrari
b879766021 Improved test 2026-02-02 15:20:41 +01:00
Martino Ferrari
d2b2750833 Full expression and validation support 2026-02-02 14:53:35 +01:00
Martino Ferrari
55ca313b73 added suggestion for variables 2026-02-02 14:37:03 +01:00
Martino Ferrari
ff19fef779 Fixed isolated file indexing 2026-02-02 14:26:19 +01:00
Martino Ferrari
d4075ff809 better multi file variable support 2026-01-30 18:45:11 +01:00
Martino Ferrari
f121f7c15d Implemented more robust LSP diagnostics and better parsing logic 2026-01-30 18:21:24 +01:00
Martino Ferrari
b4d3edab9d Improving LSP 2026-01-30 15:36:27 +01:00
Martino Ferrari
ee9674a7bc take in account Value field for producer 2026-01-30 15:06:18 +01:00
Martino Ferrari
d98593e67b Addeed verification before building 2026-01-30 15:01:30 +01:00
Martino Ferrari
a55c4b9c7c added local pragma for consumer 2026-01-30 14:52:44 +01:00
Martino Ferrari
6fa67abcb4 Implemented pragmas for not_produced not_consumed signals 2026-01-30 14:42:26 +01:00
45 changed files with 3272 additions and 584 deletions

View File

@@ -10,7 +10,7 @@ build:
go build -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/mdt go build -o $(BUILD_DIR)/$(BINARY_NAME) ./cmd/mdt
test: test:
go test -v ./... go test -v ./test/...
coverage: coverage:
go test -cover -coverprofile=coverage.out ./test/... -coverpkg=./internal/... go test -cover -coverprofile=coverage.out ./test/... -coverpkg=./internal/...

View File

@@ -5,7 +5,7 @@
## Features ## Features
- **Portability**: A single statically compiled executable compatible with any Linux 3.2+ machine (as well as possible to compile and run on Windows and Mac OS X) - **Portability**: A single statically compiled executable compatible with any Linux 3.2+ machine (as well as possible to compile and run on Windows and Mac OS X)
- **LSP Server**: Real-time syntax checking, validation, autocomplete, hover documentation, and navigation (Go to Definition/References). - **LSP Server**: Real-time syntax checking, validation, autocomplete, hover documentation, navigation (Go to Definition/References), and Inlay Hints (inline types and evaluation).
- **Builder**: Merges multiple configuration files into a single, ordered output file. - **Builder**: Merges multiple configuration files into a single, ordered output file.
- **Formatter**: Standardizes configuration file formatting. - **Formatter**: Standardizes configuration file formatting.
- **Validator**: Advanced semantic validation using [CUE](https://cuelang.org/) schemas, ensuring type safety and structural correctness. - **Validator**: Advanced semantic validation using [CUE](https://cuelang.org/) schemas, ensuring type safety and structural correctness.
@@ -16,9 +16,13 @@ Few additional features have been added to the standard MARTe configuration lang
- Multi file configuration support - Multi file configuration support
- Multi file definition merging - Multi file definition merging
- File level namespace / node - File level namespace / node (`#package`)
- Doc-strings support - Variables and Constants
- Pragmas for warning suppression / documentation - Overrideable variables (`#var`)
- Fixed constants (`#let`)
- Powerful expressions (arithmetic, bitwise, string concatenation)
- Doc-strings support (`//#`) for objects, fields, and variables
- Pragmas (`//!`) for warning suppression / documentation
## Documentation ## Documentation

View File

@@ -72,6 +72,45 @@ func runBuild(args []string) {
os.Exit(1) os.Exit(1)
} }
// 1. Run Validation
tree := index.NewProjectTree()
for _, file := range files {
content, err := os.ReadFile(file)
if err != nil {
logger.Printf("Error reading %s: %v\n", file, err)
os.Exit(1)
}
p := parser.NewParser(string(content))
config, err := p.Parse()
if err != nil {
logger.Printf("%s: Grammar error: %v\n", file, err)
os.Exit(1)
}
tree.AddFile(file, config)
}
v := validator.NewValidator(tree, ".")
v.ValidateProject()
hasErrors := false
for _, diag := range v.Diagnostics {
level := "ERROR"
if diag.Level == validator.LevelWarning {
level = "WARNING"
} else {
hasErrors = true
}
logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message)
}
if hasErrors {
logger.Println("Build failed due to validation errors.")
os.Exit(1)
}
// 2. Perform Build
b := builder.NewBuilder(files, overrides) b := builder.NewBuilder(files, overrides)
var out *os.File = os.Stdout var out *os.File = os.Stdout
@@ -99,6 +138,7 @@ func runCheck(args []string) {
} }
tree := index.NewProjectTree() tree := index.NewProjectTree()
syntaxErrors := 0
for _, file := range args { for _, file := range args {
content, err := os.ReadFile(file) content, err := os.ReadFile(file)
@@ -108,13 +148,17 @@ func runCheck(args []string) {
} }
p := parser.NewParser(string(content)) p := parser.NewParser(string(content))
config, err := p.Parse() config, _ := p.Parse()
if err != nil { if len(p.Errors()) > 0 {
logger.Printf("%s: Grammar error: %v\n", file, err) syntaxErrors += len(p.Errors())
continue for _, e := range p.Errors() {
logger.Printf("%s: Grammar error: %v\n", file, e)
}
} }
tree.AddFile(file, config) if config != nil {
tree.AddFile(file, config)
}
} }
v := validator.NewValidator(tree, ".") v := validator.NewValidator(tree, ".")
@@ -128,8 +172,9 @@ func runCheck(args []string) {
logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message) logger.Printf("%s:%d:%d: %s: %s\n", diag.File, diag.Position.Line, diag.Position.Column, level, diag.Message)
} }
if len(v.Diagnostics) > 0 { totalIssues := len(v.Diagnostics) + syntaxErrors
logger.Printf("\nFound %d issues.\n", len(v.Diagnostics)) if totalIssues > 0 {
logger.Printf("\nFound %d issues.\n", totalIssues)
} else { } else {
logger.Println("No issues found.") logger.Println("No issues found.")
} }

View File

@@ -32,18 +32,19 @@ internal/
Responsible for converting MARTe configuration text into structured data. Responsible for converting MARTe configuration text into structured data.
* **Lexer (`lexer.go`)**: Tokenizes the input stream. Handles MARTe specific syntax like `#package`, `//!` pragmas, and `//#` docstrings. Supports standard identifiers and `#`-prefixed identifiers. * **Lexer (`lexer.go`)**: Tokenizes the input stream. Handles MARTe specific syntax like `#package`, `#let`, `//!` pragmas, and `//#` docstrings. Supports standard identifiers and `#`-prefixed identifiers. Recognizes advanced number formats (hex `0x`, binary `0b`).
* **Parser (`parser.go`)**: Recursive descent parser. Converts tokens into a `Configuration` object containing definitions, comments, and pragmas. * **Parser (`parser.go`)**: Recursive descent parser. Converts tokens into a `Configuration` object containing definitions, comments, and pragmas. Implements expression parsing with precedence.
* **AST (`ast.go`)**: Defines the node types (`ObjectNode`, `Field`, `Value`, `VariableDefinition`, etc.). All nodes implement the `Node` interface providing position information. * **AST (`ast.go`)**: Defines the node types (`ObjectNode`, `Field`, `Value`, `VariableDefinition`, `BinaryExpression`, etc.). All nodes implement the `Node` interface providing position information.
### 2. `internal/index` ### 2. `internal/index`
The brain of the system. It maintains a holistic view of the project. The brain of the system. It maintains a holistic view of the project.
* **ProjectTree**: The central data structure. It holds the root of the configuration hierarchy (`Root`), references, and isolated files. * **ProjectTree**: The central data structure. It holds the root of the configuration hierarchy (`Root`), references, and isolated files.
* **ProjectNode**: Represents a logical node in the configuration. Since a node can be defined across multiple files (fragments), `ProjectNode` aggregates these fragments. It also stores locally defined variables in its `Variables` map. * **ScanDirectory**: Recursively walks the project directory to find all `.marte` files, adding them to the tree even if they contain partial syntax errors.
* **ProjectNode**: Represents a logical node in the configuration. Since a node can be defined across multiple files (fragments), `ProjectNode` aggregates these fragments. It also stores locally defined variables and constants in its `Variables` map.
* **NodeMap**: A hash map index (`map[string][]*ProjectNode`) for $O(1)$ symbol lookups, optimizing `FindNode` operations. * **NodeMap**: A hash map index (`map[string][]*ProjectNode`) for $O(1)$ symbol lookups, optimizing `FindNode` operations.
* **Reference Resolution**: The `ResolveReferences` method links `Reference` objects to their target `ProjectNode` or `VariableDefinition`. It uses `resolveScopedName` to respect lexical scoping rules, searching up the hierarchy from the reference's container. * **Reference Resolution**: The `ResolveReferences` method links `Reference` objects to their target `ProjectNode` or `VariableDefinition`. It uses `ResolveName` (exported) which respects lexical scoping rules by searching the hierarchy upwards from the reference's container, using `FindNode` for deep searches within each scope.
### 3. `internal/validator` ### 3. `internal/validator`
@@ -53,10 +54,10 @@ Ensures configuration correctness.
* **Checks**: * **Checks**:
* **Structure**: Duplicate fields, invalid content. * **Structure**: Duplicate fields, invalid content.
* **Schema**: Unifies nodes with CUE schemas (loaded via `internal/schema`) to validate types and mandatory fields. * **Schema**: Unifies nodes with CUE schemas (loaded via `internal/schema`) to validate types and mandatory fields.
* **Signals**: Verifies that signals referenced in GAMs exist in DataSources and match types. * **Signals**: Verifies that signals referenced in GAMs exist in DataSources and match types. Performs project-wide consistency checks for implicit signals.
* **Threading**: Checks `CheckDataSourceThreading` to ensure non-multithreaded DataSources are not shared across threads in the same state. * **Threading**: Checks `CheckDataSourceThreading` to ensure non-multithreaded DataSources are not shared across threads in the same state.
* **Ordering**: `CheckINOUTOrdering` verifies that for `INOUT` signals, the producing GAM appears before the consuming GAM in the thread's execution list. * **Ordering**: `CheckINOUTOrdering` verifies that for `INOUT` signals, the producing GAM appears before the consuming GAM in the thread's execution list.
* **Variables**: `CheckVariables` validates variable values against their defined CUE types (e.g. `uint`, regex). `CheckUnresolvedVariables` ensures all used variables are defined. * **Variables**: `CheckVariables` validates variable values against their defined CUE types. Prevents external overrides of `#let` constants. `CheckUnresolvedVariables` ensures all used variables are defined.
* **Unused**: Detects unused GAMs and Signals (suppressible via pragmas). * **Unused**: Detects unused GAMs and Signals (suppressible via pragmas).
### 4. `internal/lsp` ### 4. `internal/lsp`
@@ -64,11 +65,13 @@ Ensures configuration correctness.
Implements the Language Server Protocol. Implements the Language Server Protocol.
* **Server (`server.go`)**: Handles JSON-RPC messages over stdio. * **Server (`server.go`)**: Handles JSON-RPC messages over stdio.
* **Evaluation**: Implements a lightweight expression evaluator to show evaluated values in Hover and completion snippets.
* **Incremental Sync**: Supports `textDocumentSync: 2`. `HandleDidChange` applies patches to the in-memory document buffers using `offsetAt` logic. * **Incremental Sync**: Supports `textDocumentSync: 2`. `HandleDidChange` applies patches to the in-memory document buffers using `offsetAt` logic.
* **Features**: * **Features**:
* `HandleCompletion`: Context-aware suggestions (Schema fields, Signal references, Class names). * `HandleCompletion`: Context-aware suggestions (Macros, Schema fields, Signal references, Class names).
* `HandleHover`: Shows documentation, signal types, and usage analysis (e.g., "Used in GAMs: Controller (Input)"). * `HandleHover`: Shows documentation (including docstrings for variables), evaluated signal types/dimensions, and usage analysis.
* `HandleDefinition` / `HandleReferences`: specific lookup using the `index`. * `HandleDefinition` / `HandleReferences`: specific lookup using the `index`.
* `HandleRename`: Project-wide renaming supporting objects, fields, and signals (including implicit ones).
### 5. `internal/builder` ### 5. `internal/builder`
@@ -76,6 +79,7 @@ Merges multiple MARTe files into a single output.
* **Logic**: It parses all input files, builds a temporary `ProjectTree`, and then reconstructs the source code. * **Logic**: It parses all input files, builds a temporary `ProjectTree`, and then reconstructs the source code.
* **Merging**: It interleaves fields and subnodes from different file fragments to produce a coherent single-file configuration, respecting the `#package` hierarchy. * **Merging**: It interleaves fields and subnodes from different file fragments to produce a coherent single-file configuration, respecting the `#package` hierarchy.
* **Evaluation**: Evaluates all expressions and variable references into concrete MARTe values in the final output. Prevents overrides of `#let` constants.
### 6. `internal/schema` ### 6. `internal/schema`
@@ -100,12 +104,13 @@ Manages CUE schemas.
5. Diagnostics are printed (CLI) or published via `textDocument/publishDiagnostics` (LSP). 5. Diagnostics are printed (CLI) or published via `textDocument/publishDiagnostics` (LSP).
### Threading Check Logic ### Threading Check Logic
1. Finds the `RealTimeApplication` node. 1. Iterates all `RealTimeApplication` nodes found in the project.
2. Iterates through `States` and `Threads`. 2. For each App:
3. For each Thread, resolves the `Functions` (GAMs). 1. Finds `States` and `Threads`.
4. For each GAM, resolves connected `DataSources` via Input/Output signals. 2. For each Thread, resolves the `Functions` (GAMs).
5. Maps `DataSource -> Thread` within the context of a State. 3. For each GAM, resolves connected `DataSources` via Input/Output signals.
6. If a DataSource is seen in >1 Thread, it checks the `#meta.multithreaded` property. If false (default), an error is raised. 4. Maps `DataSource -> Thread` within the context of a State.
5. If a DataSource is seen in >1 Thread, it checks the `#meta.multithreaded` property. If false (default), an error is raised.
### INOUT Ordering Logic ### INOUT Ordering Logic
1. Iterates Threads. 1. Iterates Threads.

View File

@@ -20,22 +20,13 @@ Objects are defined using `+` (public/instantiated) or `$` (template/class-like)
### Fields and Values ### Fields and Values
- **Fields**: Alphanumeric identifiers (e.g., `Timeout`, `CycleTime`). - **Fields**: Alphanumeric identifiers (e.g., `Timeout`, `CycleTime`).
- **Values**: - **Values**:
- Integers: `10`, `-5`, `0xFA` - Integers: `10`, `-5`, `0xFA`, `0b1011`
- Floats: `3.14`, `1e-3` - Floats: `3.14`, `1e-3`
- Strings: `"Text"` - Strings: `"Text"`
- Booleans: `true`, `false` - Booleans: `true`, `false`
- References: `MyObject`, `MyObject.SubNode` - References: `MyObject`, `MyObject.SubNode`
- Arrays: `{ 1 2 3 }` or `{ "A" "B" }` - Arrays: `{ 1 2 3 }` or `{ "A" "B" }`
### Comments and Documentation
- Line comments: `// This is a comment`
- Docstrings: `//# This documents the following node`. These appear in hover tooltips.
```marte
//# This is the main application
+App = { ... }
```
## 2. Signals and Data Flow ## 2. Signals and Data Flow
Signals define how data moves between DataSources (drivers) and GAMs (algorithms). Signals define how data moves between DataSources (drivers) and GAMs (algorithms).
@@ -73,14 +64,99 @@ GAMs declare inputs and outputs. You can refer to signals directly or alias them
} }
``` ```
### Threading Rules ## 3. Multi-file Projects
**Validation Rule**: A DataSource that is **not** marked as multithreaded (default) cannot be used by GAMs running in different threads within the same State.
**Ordering Rule**: For `INOUT` signals (data dependency within a thread), the Producer GAM must appear **before** the Consumer GAM in the thread's `Functions` list. This ensures correct data flow within the cycle. This rule is skipped if the DataSource is marked as `multithreaded: true`. You can split your configuration into multiple files.
To allow sharing, the DataSource class in the schema must have `#meta: multithreaded: true`. ### Namespaces
Use `#package` to define where the file's content fits in the hierarchy.
## 3. Schemas and Validation **file1.marte**
```marte
#package MyApp.Controller
+MyController = { ... }
```
This places `MyController` under `MyApp.Controller`.
### Building
The `build` command merges all files.
```bash
mdt build -o final.marte src/*.marte
```
## 4. Variables and Constants
You can define variables to parameterize your configuration.
### Variables (`#var`)
Variables can be defined at any level and can be overridden externally (e.g., via CLI).
```marte
//# Default timeout
#var Timeout: uint32 = 100
+MyObject = {
Class = Timer
Timeout = $Timeout
}
```
### Constants (`#let`)
Constants are like variables but **cannot** be overridden externally. They are ideal for internal calculations or fixed parameters.
```marte
//# Sampling period
#let Ts: float64 = 0.001
+Clock = {
Class = HighResClock
Period = @Ts
}
```
### Reference Syntax
Reference a variable or constant using `$` or `@`:
```marte
Field = $MyVar
// or
Field = @MyVar
```
### Expressions
You can use operators in field values. Supported operators:
- **Math**: `+`, `-`, `*`, `/`, `%`, `^` (XOR), `&`, `|` (Bitwise)
- **String Concatenation**: `..`
- **Parentheses**: `(...)` for grouping
```marte
Field1 = 10 + 20 * 2 // 50
Field2 = "Hello " .. "World"
Field3 = ($MyVar + 5) * 2
```
### Build Override
You can override variable values during build (only for `#var`):
```bash
mdt build -vMyVar=200 src/*.marte
```
## 5. Comments and Documentation
- Line comments: `// This is a comment`
- Docstrings: `//# This documents the following node`. These appear in hover tooltips.
```marte
//# This is the main application
+App = { ... }
```
Docstrings work for objects, fields, variables, and constants.
## 6. Schemas and Validation
`mdt` validates your configuration against CUE schemas. `mdt` validates your configuration against CUE schemas.
@@ -112,29 +188,7 @@ package schema
} }
``` ```
## 4. Multi-file Projects ## 7. Pragmas (Suppressing Warnings)
You can split your configuration into multiple files.
### Namespaces
Use `#package` to define where the file's content fits in the hierarchy.
**file1.marte**
```marte
#package MyApp.Controller
+MyController = { ... }
```
This places `MyController` under `MyApp.Controller`.
### Building
The `build` command merges all files.
```bash
mdt build -o final.marte src/*.marte
```
## 5. Pragmas (Suppressing Warnings)
If validation is too strict, you can suppress warnings using pragmas (`//!`). If validation is too strict, you can suppress warnings using pragmas (`//!`).
@@ -163,37 +217,39 @@ If validation is too strict, you can suppress warnings using pragmas (`//!`).
} }
``` ```
## 6. Variables - **Global Suppression**:
```marte
//! allow(unused)
//! allow(implicit)
```
You can define variables using `#var`. The type expression supports CUE syntax. ## 8. Validation Rules (Detail)
```marte ### Data Flow Validation
#var MyVar: uint32 = 100 `mdt` checks for logical data flow errors:
#var Env: "PROD" | "DEV" = "DEV" - **Consumed before Produced**: If a GAM reads an INOUT signal that hasn't been written by a previous GAM in the same cycle, an error is reported.
``` - **Produced but not Consumed**: If a GAM writes an INOUT signal that is never read by subsequent GAMs, a warning is reported.
- **Initialization**: Providing a `Value` field in an `InputSignal` treats it as "produced" (initialized), resolving "Consumed before Produced" errors.
### Usage ### Threading Rules
Reference a variable using `@`: A DataSource that is **not** marked as multithreaded (default) cannot be used by GAMs running in different threads within the same State.
```marte To allow sharing, the DataSource class in the schema must have `#meta: multithreaded: true`.
Field = @MyVar
```
### Expressions ### Implicit vs Explicit Signals
You can use operators in field values. Supported operators: - **Explicit**: Signal defined in `DataSource.Signals`.
- **Math**: `+`, `-`, `*`, `/`, `%`, `^` (XOR), `&`, `|` (Bitwise) - **Implicit**: Signal used in GAM but not defined in DataSource. `mdt` reports a warning unless suppressed.
- **String Concatenation**: `..` - **Consistency**: All references to the same logical signal (same name in same DataSource) must share the same `Type` and size properties.
```marte ## 9. Editor Features (LSP)
Field1 = 10 + 20 * 2 // 50
Field2 = "Hello " .. "World"
Field3 = @MyVar + 5
```
### Build Override The `mdt` LSP server provides several features to improve productivity.
You can override variable values during build:
```bash ### Inlay Hints
mdt build -vMyVar=200 -vEnv="PROD" src/*.marte Inlay hints provide real-time contextual information directly in the editor:
```
- **Signal Metadata**: Signal usages in GAMs display their evaluated type and size, e.g., `Sig1` **`::uint32[10x1]`**.
- **Object Class**: References to objects show the object's class, e.g., `DataSource = ` **`FileReader::`** `DS`.
- **Expression Evaluation**:
- Complex expressions show their result at the end of the line, e.g., `Expr = 10 + 20` **` => 30`**.
- Variable references show their current value inline, e.g., `@MyVar` **`(=> 10)`**.

View File

@@ -2,11 +2,12 @@
`mdt` includes a Language Server Protocol (LSP) implementation that provides features like: `mdt` includes a Language Server Protocol (LSP) implementation that provides features like:
- Syntax highlighting and error reporting - Syntax highlighting and error reporting (Parser & Semantic)
- Auto-completion - Auto-completion
- Go to Definition / References - Go to Definition / References
- Hover documentation - Hover documentation
- Symbol renaming - Symbol renaming
- Incremental synchronization (Robust)
The LSP server is started via the command: The LSP server is started via the command:

View File

@@ -148,7 +148,46 @@ make build
This produces `app.marte` (or `final_app.marte`), which contains the flattened, merged configuration ready for the MARTe framework. This produces `app.marte` (or `final_app.marte`), which contains the flattened, merged configuration ready for the MARTe framework.
## Step 6: Advanced - Custom Schema ## Step 6: Using Variables and Expressions
You can parameterize your application using variables. Let's define a constant for the sampling frequency.
Modify `src/app.marte`:
```marte
#package MyContollApp
//# Sampling frequency in Hz
#let SamplingFreq: uint32 = 100
+App = {
// ...
+Functions = {
+Converter = {
Class = IOGAM
InputSignals = {
TimeIn = {
DataSource = Timer
Type = uint32
Frequency = $SamplingFreq
Alias = Time
}
}
// ...
}
}
}
```
You can also use expressions for calculations:
```marte
#let CycleTime: float64 = 1.0 / $SamplingFreq
```
LSP will show you the evaluated values directly in the code via **Inlay Hints** (e.g., `CycleTime: 0.01`) and in the hover documentation.
## Step 7: Advanced - Custom Schema
Suppose you want to enforce that your DataSources support multithreading. You can modify `.marte_schema.cue`. Suppose you want to enforce that your DataSources support multithreading. You can modify `.marte_schema.cue`.

2
go.mod
View File

@@ -1,6 +1,6 @@
module github.com/marte-community/marte-dev-tools module github.com/marte-community/marte-dev-tools
go 1.25.6 go 1.25
require cuelang.org/go v0.15.3 require cuelang.org/go v0.15.3

View File

@@ -213,17 +213,21 @@ func (b *Builder) collectVariables(tree *index.ProjectTree) {
for _, def := range frag.Definitions { for _, def := range frag.Definitions {
if vdef, ok := def.(*parser.VariableDefinition); ok { if vdef, ok := def.(*parser.VariableDefinition); ok {
if valStr, ok := b.Overrides[vdef.Name]; ok { if valStr, ok := b.Overrides[vdef.Name]; ok {
p := parser.NewParser("Temp = " + valStr) if !vdef.IsConst {
cfg, _ := p.Parse() p := parser.NewParser("Temp = " + valStr)
if len(cfg.Definitions) > 0 { cfg, _ := p.Parse()
if f, ok := cfg.Definitions[0].(*parser.Field); ok { if len(cfg.Definitions) > 0 {
b.variables[vdef.Name] = f.Value if f, ok := cfg.Definitions[0].(*parser.Field); ok {
continue b.variables[vdef.Name] = f.Value
continue
}
} }
} }
} }
if vdef.DefaultValue != nil { if vdef.DefaultValue != nil {
b.variables[vdef.Name] = vdef.DefaultValue if _, ok := b.variables[vdef.Name]; !ok || vdef.IsConst {
b.variables[vdef.Name] = vdef.DefaultValue
}
} }
} }
} }
@@ -255,24 +259,7 @@ func (b *Builder) compute(left parser.Value, op parser.Token, right parser.Value
return &parser.StringValue{Value: s1 + s2, Quoted: true} return &parser.StringValue{Value: s1 + s2, Quoted: true}
} }
lF, lIsF := b.valToFloat(left) // Try Integer arithmetic first
rF, rIsF := b.valToFloat(right)
if lIsF || rIsF {
res := 0.0
switch op.Type {
case parser.TokenPlus:
res = lF + rF
case parser.TokenMinus:
res = lF - rF
case parser.TokenStar:
res = lF * rF
case parser.TokenSlash:
res = lF / rF
}
return &parser.FloatValue{Value: res, Raw: fmt.Sprintf("%g", res)}
}
lI, lIsI := b.valToInt(left) lI, lIsI := b.valToInt(left)
rI, rIsI := b.valToInt(right) rI, rIsI := b.valToInt(right)
@@ -303,6 +290,25 @@ func (b *Builder) compute(left parser.Value, op parser.Token, right parser.Value
return &parser.IntValue{Value: res, Raw: fmt.Sprintf("%d", res)} return &parser.IntValue{Value: res, Raw: fmt.Sprintf("%d", res)}
} }
// Fallback to Float arithmetic
lF, lIsF := b.valToFloat(left)
rF, rIsF := b.valToFloat(right)
if lIsF || rIsF {
res := 0.0
switch op.Type {
case parser.TokenPlus:
res = lF + rF
case parser.TokenMinus:
res = lF - rF
case parser.TokenStar:
res = lF * rF
case parser.TokenSlash:
res = lF / rF
}
return &parser.FloatValue{Value: res, Raw: fmt.Sprintf("%g", res)}
}
return left return left
} }

View File

@@ -45,17 +45,15 @@ func Format(config *parser.Configuration, w io.Writer) {
} }
func fixComment(text string) string { func fixComment(text string) string {
if strings.HasPrefix(text, "//!") { if !strings.HasPrefix(text, "//!") {
if len(text) > 3 && text[3] != ' ' { if strings.HasPrefix(text, "//#") {
return "//! " + text[3:] if len(text) > 3 && text[3] != ' ' {
} return "//# " + text[3:]
} else if strings.HasPrefix(text, "//#") { }
if len(text) > 3 && text[3] != ' ' { } else if strings.HasPrefix(text, "//") {
return "//# " + text[3:] if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' {
} return "// " + text[2:]
} else if strings.HasPrefix(text, "//") { }
if len(text) > 2 && text[2] != ' ' && text[2] != '#' && text[2] != '!' {
return "// " + text[2:]
} }
} }
return text return text
@@ -105,7 +103,11 @@ func (f *Formatter) formatDefinition(def parser.Definition, indent int) int {
fmt.Fprintf(f.writer, "%s}", indentStr) fmt.Fprintf(f.writer, "%s}", indentStr)
return d.Subnode.EndPosition.Line return d.Subnode.EndPosition.Line
case *parser.VariableDefinition: case *parser.VariableDefinition:
fmt.Fprintf(f.writer, "%s#var %s: %s", indentStr, d.Name, d.TypeExpr) macro := "#var"
if d.IsConst {
macro = "#let"
}
fmt.Fprintf(f.writer, "%s%s %s: %s", indentStr, macro, d.Name, d.TypeExpr)
if d.DefaultValue != nil { if d.DefaultValue != nil {
fmt.Fprint(f.writer, " = ") fmt.Fprint(f.writer, " = ")
endLine := f.formatValue(d.DefaultValue, indent) endLine := f.formatValue(d.DefaultValue, indent)
@@ -153,6 +155,15 @@ func (f *Formatter) formatValue(val parser.Value, indent int) int {
case *parser.VariableReferenceValue: case *parser.VariableReferenceValue:
fmt.Fprint(f.writer, v.Name) fmt.Fprint(f.writer, v.Name)
return v.Position.Line return v.Position.Line
case *parser.BinaryExpression:
f.formatValue(v.Left, indent)
fmt.Fprintf(f.writer, " %s ", v.Operator.Value)
f.formatValue(v.Right, indent)
return v.Position.Line
case *parser.UnaryExpression:
fmt.Fprint(f.writer, v.Operator.Value)
f.formatValue(v.Right, indent)
return v.Position.Line
case *parser.ArrayValue: case *parser.ArrayValue:
fmt.Fprint(f.writer, "{ ") fmt.Fprint(f.writer, "{ ")
for i, e := range v.Elements { for i, e := range v.Elements {

View File

@@ -5,12 +5,14 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/marte-community/marte-dev-tools/internal/logger"
"github.com/marte-community/marte-dev-tools/internal/parser" "github.com/marte-community/marte-dev-tools/internal/parser"
) )
type VariableInfo struct { type VariableInfo struct {
Def *parser.VariableDefinition Def *parser.VariableDefinition
File string File string
Doc string
} }
type ProjectTree struct { type ProjectTree struct {
@@ -27,13 +29,14 @@ func (pt *ProjectTree) ScanDirectory(rootPath string) error {
return err return err
} }
if !info.IsDir() && strings.HasSuffix(info.Name(), ".marte") { if !info.IsDir() && strings.HasSuffix(info.Name(), ".marte") {
logger.Printf("indexing: %s [%s]\n", info.Name(), path)
content, err := os.ReadFile(path) content, err := os.ReadFile(path)
if err != nil { if err != nil {
return err // Or log and continue return err // Or log and continue
} }
p := parser.NewParser(string(content)) p := parser.NewParser(string(content))
config, err := p.Parse() config, _ := p.Parse()
if err == nil { if config != nil {
pt.AddFile(path, config) pt.AddFile(path, config)
} }
} }
@@ -232,7 +235,7 @@ func (pt *ProjectTree) populateNode(node *ProjectNode, file string, config *pars
pt.indexValue(file, d.Value) pt.indexValue(file, d.Value)
case *parser.VariableDefinition: case *parser.VariableDefinition:
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
node.Variables[d.Name] = VariableInfo{Def: d, File: file} node.Variables[d.Name] = VariableInfo{Def: d, File: file, Doc: doc}
case *parser.ObjectNode: case *parser.ObjectNode:
fileFragment.Definitions = append(fileFragment.Definitions, d) fileFragment.Definitions = append(fileFragment.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
@@ -291,7 +294,7 @@ func (pt *ProjectTree) addObjectFragment(node *ProjectNode, file string, obj *pa
pt.extractFieldMetadata(node, d) pt.extractFieldMetadata(node, d)
case *parser.VariableDefinition: case *parser.VariableDefinition:
frag.Definitions = append(frag.Definitions, d) frag.Definitions = append(frag.Definitions, d)
node.Variables[d.Name] = VariableInfo{Def: d, File: file} node.Variables[d.Name] = VariableInfo{Def: d, File: file, Doc: subDoc}
case *parser.ObjectNode: case *parser.ObjectNode:
frag.Definitions = append(frag.Definitions, d) frag.Definitions = append(frag.Definitions, d)
norm := NormalizeName(d.Name) norm := NormalizeName(d.Name)
@@ -399,12 +402,18 @@ func (pt *ProjectTree) indexValue(file string, val parser.Value) {
File: file, File: file,
}) })
case *parser.VariableReferenceValue: case *parser.VariableReferenceValue:
name := strings.TrimPrefix(v.Name, "@")
pt.References = append(pt.References, Reference{ pt.References = append(pt.References, Reference{
Name: strings.TrimPrefix(v.Name, "@"), Name: name,
Position: v.Position, Position: v.Position,
File: file, File: file,
IsVariable: true, IsVariable: true,
}) })
case *parser.BinaryExpression:
pt.indexValue(file, v.Left)
pt.indexValue(file, v.Right)
case *parser.UnaryExpression:
pt.indexValue(file, v.Right)
case *parser.ArrayValue: case *parser.ArrayValue:
for _, elem := range v.Elements { for _, elem := range v.Elements {
pt.indexValue(file, elem) pt.indexValue(file, elem)
@@ -435,7 +444,7 @@ func (pt *ProjectTree) ResolveReferences() {
continue continue
} }
ref.Target = pt.resolveScopedName(container, ref.Name) ref.Target = pt.ResolveName(container, ref.Name, nil)
} }
} }
@@ -617,51 +626,19 @@ func (pt *ProjectTree) findNodeContaining(node *ProjectNode, file string, pos pa
return nil return nil
} }
func (pt *ProjectTree) resolveScopedName(ctx *ProjectNode, name string) *ProjectNode { func (pt *ProjectTree) ResolveName(ctx *ProjectNode, name string, predicate func(*ProjectNode) bool) *ProjectNode {
if ctx == nil { if ctx == nil {
return pt.FindNode(pt.Root, name, nil) return pt.FindNode(pt.Root, name, predicate)
} }
parts := strings.Split(name, ".")
first := parts[0]
normFirst := NormalizeName(first)
var startNode *ProjectNode
curr := ctx curr := ctx
for curr != nil { for curr != nil {
if child, ok := curr.Children[normFirst]; ok { if found := pt.FindNode(curr, name, predicate); found != nil {
startNode = child return found
break
} }
curr = curr.Parent curr = curr.Parent
} }
return nil
if startNode == nil && ctx != pt.Root {
if child, ok := pt.Root.Children[normFirst]; ok {
startNode = child
}
}
if startNode == nil {
// Fallback to deep search from context root
root := ctx
for root.Parent != nil {
root = root.Parent
}
return pt.FindNode(root, name, nil)
}
curr = startNode
for i := 1; i < len(parts); i++ {
norm := NormalizeName(parts[i])
if child, ok := curr.Children[norm]; ok {
curr = child
} else {
return nil
}
}
return curr
} }
func (pt *ProjectTree) ResolveVariable(ctx *ProjectNode, name string) *VariableInfo { func (pt *ProjectTree) ResolveVariable(ctx *ProjectNode, name string) *VariableInfo {
@@ -672,7 +649,7 @@ func (pt *ProjectTree) ResolveVariable(ctx *ProjectNode, name string) *VariableI
} }
curr = curr.Parent curr = curr.Parent
} }
if ctx == nil { if pt.Root != nil {
if v, ok := pt.Root.Variables[name]; ok { if v, ok := pt.Root.Variables[name]; ok {
return &v return &v
} }

View File

@@ -97,15 +97,30 @@ type TextDocumentContentChangeEvent struct {
Text string `json:"text"` Text string `json:"text"`
} }
type TextDocumentIdentifier struct {
URI string `json:"uri"`
}
type Position struct {
Line int `json:"line"`
Character int `json:"character"`
}
type Range struct {
Start Position `json:"start"`
End Position `json:"end"`
}
type Location struct {
URI string `json:"uri"`
Range Range `json:"range"`
}
type HoverParams struct { type HoverParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"` TextDocument TextDocumentIdentifier `json:"textDocument"`
Position Position `json:"position"` Position Position `json:"position"`
} }
type TextDocumentIdentifier struct {
URI string `json:"uri"`
}
type DefinitionParams struct { type DefinitionParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"` TextDocument TextDocumentIdentifier `json:"textDocument"`
Position Position `json:"position"` Position Position `json:"position"`
@@ -121,19 +136,17 @@ type ReferenceContext struct {
IncludeDeclaration bool `json:"includeDeclaration"` IncludeDeclaration bool `json:"includeDeclaration"`
} }
type Location struct { type InlayHintParams struct {
URI string `json:"uri"` TextDocument TextDocumentIdentifier `json:"textDocument"`
Range Range `json:"range"` Range Range `json:"range"`
} }
type Range struct { type InlayHint struct {
Start Position `json:"start"` Position Position `json:"position"`
End Position `json:"end"` Label string `json:"label"`
} Kind int `json:"kind,omitempty"` // 1: Parameter, 2: Type
PaddingLeft bool `json:"paddingLeft,omitempty"`
type Position struct { PaddingRight bool `json:"paddingRight,omitempty"`
Line int `json:"line"`
Character int `json:"character"`
} }
type Hover struct { type Hover struct {
@@ -264,8 +277,9 @@ func HandleMessage(msg *JsonRpcMessage) {
"referencesProvider": true, "referencesProvider": true,
"documentFormattingProvider": true, "documentFormattingProvider": true,
"renameProvider": true, "renameProvider": true,
"inlayHintProvider": true,
"completionProvider": map[string]any{ "completionProvider": map[string]any{
"triggerCharacters": []string{"=", " "}, "triggerCharacters": []string{"=", " ", "@"},
}, },
}, },
}) })
@@ -325,6 +339,11 @@ func HandleMessage(msg *JsonRpcMessage) {
if err := json.Unmarshal(msg.Params, &params); err == nil { if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, HandleRename(params)) respond(msg.ID, HandleRename(params))
} }
case "textDocument/inlayHint":
var params InlayHintParams
if err := json.Unmarshal(msg.Params, &params); err == nil {
respond(msg.ID, HandleInlayHint(params))
}
} }
} }
@@ -336,13 +355,9 @@ func HandleDidOpen(params DidOpenTextDocumentParams) {
path := uriToPath(params.TextDocument.URI) path := uriToPath(params.TextDocument.URI)
Documents[params.TextDocument.URI] = params.TextDocument.Text Documents[params.TextDocument.URI] = params.TextDocument.Text
p := parser.NewParser(params.TextDocument.Text) p := parser.NewParser(params.TextDocument.Text)
config, err := p.Parse() config, _ := p.Parse()
if err != nil { publishParserErrors(params.TextDocument.URI, p.Errors())
publishParserError(params.TextDocument.URI, err)
} else {
publishParserError(params.TextDocument.URI, nil)
}
if config != nil { if config != nil {
Tree.AddFile(path, config) Tree.AddFile(path, config)
@@ -369,13 +384,9 @@ func HandleDidChange(params DidChangeTextDocumentParams) {
Documents[uri] = text Documents[uri] = text
path := uriToPath(uri) path := uriToPath(uri)
p := parser.NewParser(text) p := parser.NewParser(text)
config, err := p.Parse() config, _ := p.Parse()
if err != nil { publishParserErrors(uri, p.Errors())
publishParserError(uri, err)
} else {
publishParserError(uri, nil)
}
if config != nil { if config != nil {
Tree.AddFile(path, config) Tree.AddFile(path, config)
@@ -465,6 +476,9 @@ func runValidation(_ string) {
// Collect all known files to ensure we clear diagnostics for fixed files // Collect all known files to ensure we clear diagnostics for fixed files
knownFiles := make(map[string]bool) knownFiles := make(map[string]bool)
collectFiles(Tree.Root, knownFiles) collectFiles(Tree.Root, knownFiles)
for _, node := range Tree.IsolatedFiles {
collectFiles(node, knownFiles)
}
// Initialize all known files with empty diagnostics // Initialize all known files with empty diagnostics
for f := range knownFiles { for f := range knownFiles {
@@ -473,8 +487,10 @@ func runValidation(_ string) {
for _, d := range v.Diagnostics { for _, d := range v.Diagnostics {
severity := 1 // Error severity := 1 // Error
levelStr := "ERROR"
if d.Level == validator.LevelWarning { if d.Level == validator.LevelWarning {
severity = 2 // Warning severity = 2 // Warning
levelStr = "WARNING"
} }
diag := LSPDiagnostic{ diag := LSPDiagnostic{
@@ -483,7 +499,7 @@ func runValidation(_ string) {
End: Position{Line: d.Position.Line - 1, Character: d.Position.Column - 1 + 10}, // Arbitrary length End: Position{Line: d.Position.Line - 1, Character: d.Position.Column - 1 + 10}, // Arbitrary length
}, },
Severity: severity, Severity: severity,
Message: d.Message, Message: fmt.Sprintf("%s: %s", levelStr, d.Message),
Source: "mdt", Source: "mdt",
} }
@@ -508,44 +524,36 @@ func runValidation(_ string) {
} }
} }
func publishParserError(uri string, err error) { func publishParserErrors(uri string, errors []error) {
if err == nil { diagnostics := []LSPDiagnostic{}
notification := JsonRpcMessage{
Jsonrpc: "2.0",
Method: "textDocument/publishDiagnostics",
Params: mustMarshal(PublishDiagnosticsParams{
URI: uri,
Diagnostics: []LSPDiagnostic{},
}),
}
send(notification)
return
}
var line, col int for _, err := range errors {
var msg string var line, col int
// Try parsing "line:col: message" var msg string
n, _ := fmt.Sscanf(err.Error(), "%d:%d: ", &line, &col) // Try parsing "line:col: message"
if n == 2 { n, _ := fmt.Sscanf(err.Error(), "%d:%d: ", &line, &col)
parts := strings.SplitN(err.Error(), ": ", 2) if n == 2 {
if len(parts) == 2 { parts := strings.SplitN(err.Error(), ": ", 2)
msg = parts[1] if len(parts) == 2 {
msg = parts[1]
}
} else {
// Fallback
line = 1
col = 1
msg = err.Error()
} }
} else {
// Fallback
line = 1
col = 1
msg = err.Error()
}
diag := LSPDiagnostic{ diag := LSPDiagnostic{
Range: Range{ Range: Range{
Start: Position{Line: line - 1, Character: col - 1}, Start: Position{Line: line - 1, Character: col - 1},
End: Position{Line: line - 1, Character: col}, End: Position{Line: line - 1, Character: col},
}, },
Severity: 1, // Error Severity: 1, // Error
Message: msg, Message: msg,
Source: "mdt-parser", Source: "mdt-parser",
}
diagnostics = append(diagnostics, diag)
} }
notification := JsonRpcMessage{ notification := JsonRpcMessage{
@@ -553,13 +561,16 @@ func publishParserError(uri string, err error) {
Method: "textDocument/publishDiagnostics", Method: "textDocument/publishDiagnostics",
Params: mustMarshal(PublishDiagnosticsParams{ Params: mustMarshal(PublishDiagnosticsParams{
URI: uri, URI: uri,
Diagnostics: []LSPDiagnostic{diag}, Diagnostics: diagnostics,
}), }),
} }
send(notification) send(notification)
} }
func collectFiles(node *index.ProjectNode, files map[string]bool) { func collectFiles(node *index.ProjectNode, files map[string]bool) {
if node == nil {
return
}
for _, frag := range node.Fragments { for _, frag := range node.Fragments {
files[frag.File] = true files[frag.File] = true
} }
@@ -584,6 +595,8 @@ func HandleHover(params HoverParams) *Hover {
return nil return nil
} }
container := Tree.GetNodeContaining(path, parser.Position{Line: line, Column: col})
var content string var content string
if res.Node != nil { if res.Node != nil {
@@ -595,9 +608,18 @@ func HandleHover(params HoverParams) *Hover {
} else if res.Field != nil { } else if res.Field != nil {
content = fmt.Sprintf("**Field**: `%s`", res.Field.Name) content = fmt.Sprintf("**Field**: `%s`", res.Field.Name)
} else if res.Variable != nil { } else if res.Variable != nil {
content = fmt.Sprintf("**Variable**: `%s`\nType: `%s`", res.Variable.Name, res.Variable.TypeExpr) kind := "Variable"
if res.Variable.IsConst {
kind = "Constant"
}
content = fmt.Sprintf("**%s**: `%s`\nType: `%s`", kind, res.Variable.Name, res.Variable.TypeExpr)
if res.Variable.DefaultValue != nil { if res.Variable.DefaultValue != nil {
content += fmt.Sprintf("\nDefault: `%s`", valueToString(res.Variable.DefaultValue)) content += fmt.Sprintf("\nDefault: `%s`", valueToString(res.Variable.DefaultValue, container))
}
if info := Tree.ResolveVariable(container, res.Variable.Name); info != nil {
if info.Doc != "" {
content += "\n\n" + info.Doc
}
} }
} else if res.Reference != nil { } else if res.Reference != nil {
targetName := "Unresolved" targetName := "Unresolved"
@@ -611,9 +633,18 @@ func HandleHover(params HoverParams) *Hover {
} else if res.Reference.TargetVariable != nil { } else if res.Reference.TargetVariable != nil {
v := res.Reference.TargetVariable v := res.Reference.TargetVariable
targetName = v.Name targetName = v.Name
fullInfo = fmt.Sprintf("**Variable**: `@%s`\nType: `%s`", v.Name, v.TypeExpr) kind := "Variable"
if v.IsConst {
kind = "Constant"
}
fullInfo = fmt.Sprintf("**%s**: `@%s`\nType: `%s`", kind, v.Name, v.TypeExpr)
if v.DefaultValue != nil { if v.DefaultValue != nil {
fullInfo += fmt.Sprintf("\nDefault: `%s`", valueToString(v.DefaultValue)) fullInfo += fmt.Sprintf("\nDefault: `%s`", valueToString(v.DefaultValue, container))
}
if info := Tree.ResolveVariable(container, res.Reference.Name); info != nil {
if info.Doc != "" {
fullInfo += "\n\n" + info.Doc
}
} }
} }
@@ -637,7 +668,8 @@ func HandleHover(params HoverParams) *Hover {
} }
} }
func valueToString(val parser.Value) string { func valueToString(val parser.Value, ctx *index.ProjectNode) string {
val = evaluate(val, ctx)
switch v := val.(type) { switch v := val.(type) {
case *parser.StringValue: case *parser.StringValue:
if v.Quoted { if v.Quoted {
@@ -657,7 +689,7 @@ func valueToString(val parser.Value) string {
case *parser.ArrayValue: case *parser.ArrayValue:
elements := []string{} elements := []string{}
for _, e := range v.Elements { for _, e := range v.Elements {
elements = append(elements, valueToString(e)) elements = append(elements, valueToString(e, ctx))
} }
return fmt.Sprintf("{ %s }", strings.Join(elements, " ")) return fmt.Sprintf("{ %s }", strings.Join(elements, " "))
default: default:
@@ -683,6 +715,31 @@ func HandleCompletion(params CompletionParams) *CompletionList {
prefix := lineStr[:col] prefix := lineStr[:col]
// Case 4: Top-level keywords/macros
if strings.HasPrefix(prefix, "#") && !strings.Contains(prefix, " ") {
return &CompletionList{
Items: []CompletionItem{
{Label: "#package", Kind: 14, InsertText: "#package ${1:Project.URI}", InsertTextFormat: 2, Detail: "Project namespace definition"},
{Label: "#var", Kind: 14, InsertText: "#var ${1:Name}: ${2:Type} = ${3:DefaultValue}", InsertTextFormat: 2, Detail: "Variable definition"},
{Label: "#let", Kind: 14, InsertText: "#let ${1:Name}: ${2:Type} = ${3:Value}", InsertTextFormat: 2, Detail: "Constant variable definition"},
},
}
}
// Case 3: Variable completion
varRegex := regexp.MustCompile(`([@])([a-zA-Z0-9_]*)$`)
if matches := varRegex.FindStringSubmatch(prefix); matches != nil {
container := Tree.GetNodeContaining(path, parser.Position{Line: params.Position.Line + 1, Column: col + 1})
if container == nil {
if iso, ok := Tree.IsolatedFiles[path]; ok {
container = iso
} else {
container = Tree.Root
}
}
return suggestVariables(container)
}
// Case 1: Assigning a value (Ends with "=" or "= ") // Case 1: Assigning a value (Ends with "=" or "= ")
if strings.Contains(prefix, "=") { if strings.Contains(prefix, "=") {
lastIdx := strings.LastIndex(prefix, "=") lastIdx := strings.LastIndex(prefix, "=")
@@ -724,9 +781,16 @@ func HandleCompletion(params CompletionParams) *CompletionList {
return nil return nil
} }
func suggestGAMSignals(_ *index.ProjectNode, direction string) *CompletionList { func suggestGAMSignals(container *index.ProjectNode, direction string) *CompletionList {
var items []CompletionItem var items []CompletionItem
// Find scope root
root := container
for root.Parent != nil {
root = root.Parent
}
var walk func(*index.ProjectNode)
processNode := func(node *index.ProjectNode) { processNode := func(node *index.ProjectNode) {
if !isDataSource(node) { if !isDataSource(node) {
return return
@@ -784,7 +848,13 @@ func suggestGAMSignals(_ *index.ProjectNode, direction string) *CompletionList {
} }
} }
Tree.Walk(processNode) walk = func(n *index.ProjectNode) {
processNode(n)
for _, child := range n.Children {
walk(child)
}
}
walk(root)
if len(items) > 0 { if len(items) > 0 {
return &CompletionList{Items: items} return &CompletionList{Items: items}
@@ -902,20 +972,41 @@ func suggestFieldValues(container *index.ProjectNode, field string, path string)
root = Tree.Root root = Tree.Root
} }
var items []CompletionItem
if field == "DataSource" { if field == "DataSource" {
return suggestObjects(root, "DataSource") if list := suggestObjects(root, "DataSource"); list != nil {
} items = append(items, list.Items...)
if field == "Functions" { }
return suggestObjects(root, "GAM") } else if field == "Functions" {
} if list := suggestObjects(root, "GAM"); list != nil {
if field == "Type" { items = append(items, list.Items...)
return suggestSignalTypes() }
} else if field == "Type" {
if list := suggestSignalTypes(); list != nil {
items = append(items, list.Items...)
}
} else {
if list := suggestCUEEnums(container, field); list != nil {
items = append(items, list.Items...)
}
} }
if list := suggestCUEEnums(container, field); list != nil { // Add variables
return list vars := suggestVariables(container)
if vars != nil {
for _, item := range vars.Items {
// Create copy to modify label
newItem := item
newItem.Label = "@" + newItem.Label
newItem.InsertText = "@" + item.Label
items = append(items, newItem)
}
} }
if len(items) > 0 {
return &CompletionList{Items: items}
}
return nil return nil
} }
@@ -1211,6 +1302,17 @@ func HandleReferences(params ReferenceParams) []Location {
return locations return locations
} }
func getEvaluatedMetadata(node *index.ProjectNode, key string) string {
for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok && f.Name == key {
return valueToString(f.Value, node)
}
}
}
return node.Metadata[key]
}
func formatNodeInfo(node *index.ProjectNode) string { func formatNodeInfo(node *index.ProjectNode) string {
info := "" info := ""
if class := node.Metadata["Class"]; class != "" { if class := node.Metadata["Class"]; class != "" {
@@ -1219,8 +1321,8 @@ func formatNodeInfo(node *index.ProjectNode) string {
info = fmt.Sprintf("`%s`\n\n", node.RealName) info = fmt.Sprintf("`%s`\n\n", node.RealName)
} }
// Check if it's a Signal (has Type or DataSource) // Check if it's a Signal (has Type or DataSource)
typ := node.Metadata["Type"] typ := getEvaluatedMetadata(node, "Type")
ds := node.Metadata["DataSource"] ds := getEvaluatedMetadata(node, "DataSource")
if ds == "" { if ds == "" {
if node.Parent != nil && node.Parent.Name == "Signals" { if node.Parent != nil && node.Parent.Name == "Signals" {
@@ -1240,8 +1342,8 @@ func formatNodeInfo(node *index.ProjectNode) string {
} }
// Size // Size
dims := node.Metadata["NumberOfDimensions"] dims := getEvaluatedMetadata(node, "NumberOfDimensions")
elems := node.Metadata["NumberOfElements"] elems := getEvaluatedMetadata(node, "NumberOfElements")
if dims != "" || elems != "" { if dims != "" || elems != "" {
sigInfo += fmt.Sprintf("**Size**: `[%s]`, `%s` dims ", elems, dims) sigInfo += fmt.Sprintf("**Size**: `[%s]`, `%s` dims ", elems, dims)
} }
@@ -1252,6 +1354,55 @@ func formatNodeInfo(node *index.ProjectNode) string {
info += fmt.Sprintf("\n\n%s", node.Doc) info += fmt.Sprintf("\n\n%s", node.Doc)
} }
// Check if Implicit Signal peers exist
if ds, _ := getSignalInfo(node); ds != nil {
peers := findSignalPeers(node)
// 1. Explicit Definition Fields
var defNode *index.ProjectNode
for _, p := range peers {
if p.Parent != nil && p.Parent.Name == "Signals" {
defNode = p
break
}
}
if defNode != nil {
for _, frag := range defNode.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
key := f.Name
if key != "Type" && key != "NumberOfElements" && key != "NumberOfDimensions" && key != "Class" {
val := valueToString(f.Value, defNode)
info += fmt.Sprintf("\n**%s**: `%s`", key, val)
}
}
}
}
}
extraInfo := ""
for _, p := range peers {
if (p.Parent.Name == "InputSignals" || p.Parent.Name == "OutputSignals") && isGAM(p.Parent.Parent) {
gamName := p.Parent.Parent.RealName
for _, frag := range p.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
key := f.Name
if key != "DataSource" && key != "Alias" && key != "Type" && key != "Class" && key != "NumberOfElements" && key != "NumberOfDimensions" {
val := valueToString(f.Value, p)
extraInfo += fmt.Sprintf("\n- **%s** (%s): `%s`", key, gamName, val)
}
}
}
}
}
}
if extraInfo != "" {
info += "\n\n**Usage Details**:" + extraInfo
}
}
// Find references // Find references
var refs []string var refs []string
for _, ref := range Tree.References { for _, ref := range Tree.References {
@@ -1400,6 +1551,81 @@ func HandleRename(params RenameParams) *WorkspaceEdit {
} }
if targetNode != nil { if targetNode != nil {
// Special handling for Signals (Implicit/Explicit)
if ds, _ := getSignalInfo(targetNode); ds != nil {
peers := findSignalPeers(targetNode)
seenPeers := make(map[*index.ProjectNode]bool)
for _, peer := range peers {
if seenPeers[peer] {
continue
}
seenPeers[peer] = true
// Rename Peer Definition
prefix := ""
if len(peer.RealName) > 0 {
first := peer.RealName[0]
if first == '+' || first == '$' {
prefix = string(first)
}
}
normNewName := strings.TrimLeft(params.NewName, "+$")
finalDefName := prefix + normNewName
hasAlias := false
for _, frag := range peer.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok && f.Name == "Alias" {
hasAlias = true
}
}
}
if !hasAlias {
for _, frag := range peer.Fragments {
if frag.IsObject {
rng := Range{
Start: Position{Line: frag.ObjectPos.Line - 1, Character: frag.ObjectPos.Column - 1},
End: Position{Line: frag.ObjectPos.Line - 1, Character: frag.ObjectPos.Column - 1 + len(peer.RealName)},
}
addEdit(frag.File, rng, finalDefName)
}
}
}
// Rename References to this Peer
for _, ref := range Tree.References {
if ref.Target == peer {
// Handle qualified names
if strings.Contains(ref.Name, ".") {
if strings.HasSuffix(ref.Name, "."+peer.Name) {
prefixLen := len(ref.Name) - len(peer.Name)
rng := Range{
Start: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1 + prefixLen},
End: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1 + len(ref.Name)},
}
addEdit(ref.File, rng, normNewName)
} else if ref.Name == peer.Name {
rng := Range{
Start: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1},
End: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1 + len(ref.Name)},
}
addEdit(ref.File, rng, normNewName)
}
} else {
rng := Range{
Start: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1},
End: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1 + len(ref.Name)},
}
addEdit(ref.File, rng, normNewName)
}
}
}
}
return &WorkspaceEdit{Changes: changes}
}
// 1. Rename Definitions // 1. Rename Definitions
prefix := "" prefix := ""
if len(targetNode.RealName) > 0 { if len(targetNode.RealName) > 0 {
@@ -1513,3 +1739,387 @@ func send(msg any) {
body, _ := json.Marshal(msg) body, _ := json.Marshal(msg)
fmt.Fprintf(Output, "Content-Length: %d\r\n\r\n%s", len(body), body) fmt.Fprintf(Output, "Content-Length: %d\r\n\r\n%s", len(body), body)
} }
func suggestVariables(container *index.ProjectNode) *CompletionList {
items := []CompletionItem{}
seen := make(map[string]bool)
curr := container
for curr != nil {
for name, info := range curr.Variables {
if !seen[name] {
seen[name] = true
doc := ""
if info.Def.DefaultValue != nil {
doc = fmt.Sprintf("Default: %s", valueToString(info.Def.DefaultValue, container))
}
kind := "Variable"
if info.Def.IsConst {
kind = "Constant"
}
items = append(items, CompletionItem{
Label: name,
Kind: 6, // Variable
Detail: fmt.Sprintf("%s (%s)", kind, info.Def.TypeExpr),
Documentation: doc,
})
}
}
curr = curr.Parent
}
return &CompletionList{Items: items}
}
func getSignalInfo(node *index.ProjectNode) (*index.ProjectNode, string) {
if node.Parent == nil {
return nil, ""
}
// Case 1: Definition
if node.Parent.Name == "Signals" && isDataSource(node.Parent.Parent) {
return node.Parent.Parent, node.RealName
}
// Case 2: Usage
if (node.Parent.Name == "InputSignals" || node.Parent.Name == "OutputSignals") && isGAM(node.Parent.Parent) {
dsName := ""
sigName := node.RealName
// Scan fields
for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
if f.Name == "DataSource" {
if v, ok := f.Value.(*parser.StringValue); ok {
dsName = v.Value
}
if v, ok := f.Value.(*parser.ReferenceValue); ok {
dsName = v.Value
}
}
if f.Name == "Alias" {
if v, ok := f.Value.(*parser.StringValue); ok {
sigName = v.Value
}
if v, ok := f.Value.(*parser.ReferenceValue); ok {
sigName = v.Value
}
}
}
}
}
if dsName != "" {
dsNode := Tree.ResolveName(node, dsName, isDataSource)
return dsNode, sigName
}
}
return nil, ""
}
func findSignalPeers(target *index.ProjectNode) []*index.ProjectNode {
dsNode, sigName := getSignalInfo(target)
if dsNode == nil || sigName == "" {
return nil
}
var peers []*index.ProjectNode
// Add definition if exists (and not already target)
if signals, ok := dsNode.Children["Signals"]; ok {
if def, ok := signals.Children[index.NormalizeName(sigName)]; ok {
peers = append(peers, def)
}
}
// Find usages
Tree.Walk(func(n *index.ProjectNode) {
d, s := getSignalInfo(n)
if d == dsNode && s == sigName {
peers = append(peers, n)
}
})
return peers
}
func evaluate(val parser.Value, ctx *index.ProjectNode) parser.Value {
switch v := val.(type) {
case *parser.VariableReferenceValue:
name := strings.TrimLeft(v.Name, "@")
if info := Tree.ResolveVariable(ctx, name); info != nil {
if info.Def.DefaultValue != nil {
return evaluate(info.Def.DefaultValue, ctx)
}
}
return v
case *parser.BinaryExpression:
left := evaluate(v.Left, ctx)
right := evaluate(v.Right, ctx)
return compute(left, v.Operator, right)
case *parser.UnaryExpression:
right := evaluate(v.Right, ctx)
return computeUnary(v.Operator, right)
}
return val
}
func compute(left parser.Value, op parser.Token, right parser.Value) parser.Value {
if op.Type == parser.TokenConcat {
getRaw := func(v parser.Value) string {
if s, ok := v.(*parser.StringValue); ok {
return s.Value
}
return valueToString(v, nil)
}
s1 := getRaw(left)
s2 := getRaw(right)
return &parser.StringValue{Value: s1 + s2, Quoted: true}
}
toInt := func(v parser.Value) (int64, bool) {
if idx, ok := v.(*parser.IntValue); ok {
return idx.Value, true
}
return 0, false
}
toFloat := func(v parser.Value) (float64, bool) {
if f, ok := v.(*parser.FloatValue); ok {
return f.Value, true
}
if idx, ok := v.(*parser.IntValue); ok {
return float64(idx.Value), true
}
return 0, false
}
lI, lIsI := toInt(left)
rI, rIsI := toInt(right)
if lIsI && rIsI {
var res int64
switch op.Type {
case parser.TokenPlus:
res = lI + rI
case parser.TokenMinus:
res = lI - rI
case parser.TokenStar:
res = lI * rI
case parser.TokenSlash:
if rI != 0 {
res = lI / rI
}
case parser.TokenPercent:
if rI != 0 {
res = lI % rI
}
case parser.TokenAmpersand:
res = lI & rI
case parser.TokenPipe:
res = lI | rI
case parser.TokenCaret:
res = lI ^ rI
}
return &parser.IntValue{Value: res, Raw: fmt.Sprintf("%d", res)}
}
lF, lIsF := toFloat(left)
rF, rIsF := toFloat(right)
if lIsF || rIsF {
var res float64
switch op.Type {
case parser.TokenPlus:
res = lF + rF
case parser.TokenMinus:
res = lF - rF
case parser.TokenStar:
res = lF * rF
case parser.TokenSlash:
res = lF / rF
}
return &parser.FloatValue{Value: res, Raw: fmt.Sprintf("%g", res)}
}
return left
}
func computeUnary(op parser.Token, val parser.Value) parser.Value {
switch op.Type {
case parser.TokenMinus:
if i, ok := val.(*parser.IntValue); ok {
return &parser.IntValue{Value: -i.Value, Raw: fmt.Sprintf("%d", -i.Value)}
}
if f, ok := val.(*parser.FloatValue); ok {
return &parser.FloatValue{Value: -f.Value, Raw: fmt.Sprintf("%g", -f.Value)}
}
case parser.TokenSymbol:
if op.Value == "!" {
if b, ok := val.(*parser.BoolValue); ok {
return &parser.BoolValue{Value: !b.Value}
}
}
}
return val
}
func isComplexValue(val parser.Value) bool {
switch val.(type) {
case *parser.BinaryExpression, *parser.UnaryExpression, *parser.VariableReferenceValue:
return true
}
return false
}
func HandleInlayHint(params InlayHintParams) []InlayHint {
path := uriToPath(params.TextDocument.URI)
var hints []InlayHint
seenPositions := make(map[Position]bool)
addHint := func(h InlayHint) {
if !seenPositions[h.Position] {
hints = append(hints, h)
seenPositions[h.Position] = true
}
}
Tree.Walk(func(node *index.ProjectNode) {
for _, frag := range node.Fragments {
if frag.File != path {
continue
}
// Signal Name Hint (::TYPE[SIZE])
if node.Parent != nil && (node.Parent.Name == "InputSignals" || node.Parent.Name == "OutputSignals") {
typ := getEvaluatedMetadata(node, "Type")
elems := getEvaluatedMetadata(node, "NumberOfElements")
dims := getEvaluatedMetadata(node, "NumberOfDimensions")
if typ == "" && node.Target != nil {
typ = node.Target.Metadata["Type"]
if elems == "" {
elems = node.Target.Metadata["NumberOfElements"]
}
if dims == "" {
dims = node.Target.Metadata["NumberOfDimensions"]
}
}
if typ != "" {
if elems == "" {
elems = "1"
}
if dims == "" {
dims = "1"
}
label := fmt.Sprintf("::%s[%sx%s]", typ, elems, dims)
pos := frag.ObjectPos
addHint(InlayHint{
Position: Position{Line: pos.Line - 1, Character: pos.Column - 1 + len(node.RealName)},
Label: label,
Kind: 2, // Type
})
}
}
// Field-based hints (DataSource class and Expression evaluation)
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
// DataSource Class Hint
if f.Name == "DataSource" && (node.Parent != nil && (node.Parent.Name == "InputSignals" || node.Parent.Name == "OutputSignals")) {
dsName := valueToString(f.Value, node)
dsNode := Tree.ResolveName(node, dsName, isDataSource)
if dsNode != nil {
cls := dsNode.Metadata["Class"]
if cls != "" {
addHint(InlayHint{
Position: Position{Line: f.Position.Line - 1, Character: f.Position.Column - 1 + len(f.Name) + 3}, // "DataSource = "
Label: cls + "::",
Kind: 1, // Parameter
})
}
}
}
// Expression Evaluation Hint
if isComplexValue(f.Value) {
res := valueToString(f.Value, node)
if res != "" {
uri := params.TextDocument.URI
text, ok := Documents[uri]
if ok {
lines := strings.Split(text, "\n")
lineIdx := f.Position.Line - 1
if lineIdx >= 0 && lineIdx < len(lines) {
line := lines[lineIdx]
addHint(InlayHint{
Position: Position{Line: lineIdx, Character: len(line)},
Label: " => " + res,
Kind: 2, // Type/Value
})
}
}
}
}
} else if v, ok := def.(*parser.VariableDefinition); ok {
// Expression Evaluation Hint for #let/#var
if v.DefaultValue != nil && isComplexValue(v.DefaultValue) {
res := valueToString(v.DefaultValue, node)
if res != "" {
uri := params.TextDocument.URI
text, ok := Documents[uri]
if ok {
lines := strings.Split(text, "\n")
lineIdx := v.Position.Line - 1
if lineIdx >= 0 && lineIdx < len(lines) {
line := lines[lineIdx]
addHint(InlayHint{
Position: Position{Line: lineIdx, Character: len(line)},
Label: " => " + res,
Kind: 2,
})
}
}
}
}
}
}
}
})
// Add logic for general object references
for _, ref := range Tree.References {
if ref.File != path {
continue
}
if ref.Target != nil {
cls := ref.Target.Metadata["Class"]
if cls != "" {
addHint(InlayHint{
Position: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1},
Label: cls + "::",
Kind: 1, // Parameter
})
}
} else if ref.IsVariable {
// Variable reference evaluation hint: @VAR(=> VALUE)
container := Tree.GetNodeContaining(ref.File, ref.Position)
if info := Tree.ResolveVariable(container, ref.Name); info != nil && info.Def.DefaultValue != nil {
val := valueToString(info.Def.DefaultValue, container)
if val != "" {
addHint(InlayHint{
Position: Position{Line: ref.Position.Line - 1, Character: ref.Position.Column - 1 + len(ref.Name) + 1},
Label: "(=> " + val + ")",
Kind: 2,
})
}
}
}
}
return hints
}

View File

@@ -131,6 +131,7 @@ type VariableDefinition struct {
Name string Name string
TypeExpr string TypeExpr string
DefaultValue Value DefaultValue Value
IsConst bool
} }
func (v *VariableDefinition) Pos() Position { return v.Position } func (v *VariableDefinition) Pos() Position { return v.Position }
@@ -153,3 +154,12 @@ type BinaryExpression struct {
func (b *BinaryExpression) Pos() Position { return b.Position } func (b *BinaryExpression) Pos() Position { return b.Position }
func (b *BinaryExpression) isValue() {} func (b *BinaryExpression) isValue() {}
type UnaryExpression struct {
Position Position
Operator Token
Right Value
}
func (u *UnaryExpression) Pos() Position { return u.Position }
func (u *UnaryExpression) isValue() {}

View File

@@ -20,6 +20,7 @@ const (
TokenBool TokenBool
TokenPackage TokenPackage
TokenPragma TokenPragma
TokenLet
TokenComment TokenComment
TokenDocstring TokenDocstring
TokenComma TokenComma
@@ -147,18 +148,12 @@ func (l *Lexer) NextToken() Token {
case ']': case ']':
return l.emit(TokenRBracket) return l.emit(TokenRBracket)
case '+': case '+':
if unicode.IsSpace(l.peek()) { if unicode.IsSpace(l.peek()) || unicode.IsDigit(l.peek()) {
return l.emit(TokenPlus) return l.emit(TokenPlus)
} }
return l.lexObjectIdentifier() return l.lexObjectIdentifier()
case '-': case '-':
if unicode.IsDigit(l.peek()) { return l.emit(TokenMinus)
return l.lexNumber()
}
if unicode.IsSpace(l.peek()) {
return l.emit(TokenMinus)
}
return l.lexIdentifier()
case '*': case '*':
return l.emit(TokenStar) return l.emit(TokenStar)
case '/': case '/':
@@ -242,13 +237,64 @@ func (l *Lexer) lexString() Token {
} }
func (l *Lexer) lexNumber() Token { func (l *Lexer) lexNumber() Token {
for { // Check for hex or binary prefix if we started with '0'
r := l.next() if l.input[l.start:l.pos] == "0" {
if unicode.IsDigit(r) || unicode.IsLetter(r) || r == '.' || r == '-' || r == '+' { switch l.peek() {
continue case 'x', 'X':
l.next()
l.lexHexDigits()
return l.emit(TokenNumber)
case 'b', 'B':
l.next()
l.lexBinaryDigits()
return l.emit(TokenNumber)
} }
l.backup() }
return l.emit(TokenNumber)
// Consume remaining digits
l.lexDigits()
if l.peek() == '.' {
l.next()
l.lexDigits()
}
if r := l.peek(); r == 'e' || r == 'E' {
l.next()
if p := l.peek(); p == '+' || p == '-' {
l.next()
}
l.lexDigits()
}
return l.emit(TokenNumber)
}
// lexHexDigits consumes a run of hexadecimal digits (0-9, a-f, A-F),
// leaving the lexer positioned at the first non-hex rune.
func (l *Lexer) lexHexDigits() {
	isHexDigit := func(r rune) bool {
		return unicode.IsDigit(r) || ('a' <= r && r <= 'f') || ('A' <= r && r <= 'F')
	}
	for isHexDigit(l.peek()) {
		l.next()
	}
}
// lexBinaryDigits consumes a run of binary digits ('0' or '1'),
// leaving the lexer positioned at the first other rune.
func (l *Lexer) lexBinaryDigits() {
	for {
		switch l.peek() {
		case '0', '1':
			l.next()
		default:
			return
		}
	}
}
func (l *Lexer) lexDigits() {
for unicode.IsDigit(l.peek()) {
l.next()
} }
} }
@@ -312,13 +358,16 @@ func (l *Lexer) lexHashIdentifier() Token {
if val == "#package" { if val == "#package" {
return l.lexUntilNewline(TokenPackage) return l.lexUntilNewline(TokenPackage)
} }
if val == "#let" {
return l.emit(TokenLet)
}
return l.emit(TokenIdentifier) return l.emit(TokenIdentifier)
} }
func (l *Lexer) lexVariableReference() Token { func (l *Lexer) lexVariableReference() Token {
for { for {
r := l.next() r := l.next()
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '-' { if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
continue continue
} }
l.backup() l.backup()

View File

@@ -99,6 +99,8 @@ func (p *Parser) Parse() (*Configuration, error) {
func (p *Parser) parseDefinition() (Definition, bool) { func (p *Parser) parseDefinition() (Definition, bool) {
tok := p.next() tok := p.next()
switch tok.Type { switch tok.Type {
case TokenLet:
return p.parseLet(tok)
case TokenIdentifier: case TokenIdentifier:
name := tok.Value name := tok.Value
if name == "#var" { if name == "#var" {
@@ -286,7 +288,11 @@ func (p *Parser) parseAtom() (Value, bool) {
}, true }, true
case TokenNumber: case TokenNumber:
if strings.Contains(tok.Value, ".") || strings.Contains(tok.Value, "e") { isFloat := (strings.Contains(tok.Value, ".") || strings.Contains(tok.Value, "e") || strings.Contains(tok.Value, "E")) &&
!strings.HasPrefix(tok.Value, "0x") && !strings.HasPrefix(tok.Value, "0X") &&
!strings.HasPrefix(tok.Value, "0b") && !strings.HasPrefix(tok.Value, "0B")
if isFloat {
f, _ := strconv.ParseFloat(tok.Value, 64) f, _ := strconv.ParseFloat(tok.Value, 64)
return &FloatValue{Position: tok.Position, Value: f, Raw: tok.Value}, true return &FloatValue{Position: tok.Position, Value: f, Raw: tok.Value}, true
} }
@@ -299,6 +305,34 @@ func (p *Parser) parseAtom() (Value, bool) {
return &ReferenceValue{Position: tok.Position, Value: tok.Value}, true return &ReferenceValue{Position: tok.Position, Value: tok.Value}, true
case TokenVariableReference: case TokenVariableReference:
return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true
case TokenMinus:
val, ok := p.parseAtom()
if !ok {
return nil, false
}
return &UnaryExpression{Position: tok.Position, Operator: tok, Right: val}, true
case TokenObjectIdentifier:
return &VariableReferenceValue{Position: tok.Position, Name: tok.Value}, true
case TokenSymbol:
if tok.Value == "(" {
val, ok := p.parseExpression(0)
if !ok {
return nil, false
}
if next := p.next(); next.Type != TokenSymbol || next.Value != ")" {
p.addError(next.Position, "expected )")
return nil, false
}
return val, true
}
if tok.Value == "!" {
val, ok := p.parseAtom()
if !ok {
return nil, false
}
return &UnaryExpression{Position: tok.Position, Operator: tok, Right: val}, true
}
fallthrough
case TokenLBrace: case TokenLBrace:
arr := &ArrayValue{Position: tok.Position} arr := &ArrayValue{Position: tok.Position}
for { for {
@@ -380,3 +414,59 @@ func (p *Parser) parseVariableDefinition(startTok Token) (Definition, bool) {
DefaultValue: defVal, DefaultValue: defVal,
}, true }, true
} }
// parseLet parses a constant definition of the form
//
//	#let NAME: TYPE = expression
//
// startTok is the already-consumed #let token. The type expression is
// everything between ':' and '=' on the same line as NAME. On success it
// returns a *VariableDefinition with IsConst set; on any syntax error it
// records a diagnostic via addError and returns (nil, false).
func (p *Parser) parseLet(startTok Token) (Definition, bool) {
	nameTok := p.next()
	if nameTok.Type != TokenIdentifier {
		p.addError(nameTok.Position, "expected constant name")
		return nil, false
	}
	if p.next().Type != TokenColon {
		p.addError(nameTok.Position, "expected :")
		return nil, false
	}

	// Collect the type expression: all tokens on the declaration line
	// up to (but not including) '='.
	line := nameTok.Position.Line
	var parts []string
	for {
		t := p.peek()
		if t.Type == TokenEOF || t.Type == TokenEqual || t.Position.Line > line {
			break
		}
		parts = append(parts, p.next().Value)
	}

	// A constant must be initialized, so '=' is mandatory here.
	if p.next().Type != TokenEqual {
		p.addError(nameTok.Position, "expected =")
		return nil, false
	}
	defVal, ok := p.parseValue()
	if !ok {
		return nil, false
	}

	return &VariableDefinition{
		Position:     startTok.Position,
		Name:         nameTok.Value,
		TypeExpr:     strings.Join(parts, " "),
		DefaultValue: defVal,
		IsConst:      true,
	}, true
}
// Errors returns the parse errors accumulated so far.
func (p *Parser) Errors() []error {
	return p.errors
}

View File

@@ -56,6 +56,7 @@ func (v *Validator) ValidateProject() {
v.CheckUnused() v.CheckUnused()
v.CheckDataSourceThreading() v.CheckDataSourceThreading()
v.CheckINOUTOrdering() v.CheckINOUTOrdering()
v.CheckSignalConsistency()
v.CheckVariables() v.CheckVariables()
v.CheckUnresolvedVariables() v.CheckUnresolvedVariables()
} }
@@ -236,6 +237,108 @@ func (v *Validator) valueToInterface(val parser.Value, ctx *index.ProjectNode) i
arr = append(arr, v.valueToInterface(e, ctx)) arr = append(arr, v.valueToInterface(e, ctx))
} }
return arr return arr
case *parser.BinaryExpression:
left := v.valueToInterface(t.Left, ctx)
right := v.valueToInterface(t.Right, ctx)
return v.evaluateBinary(left, t.Operator.Type, right)
case *parser.UnaryExpression:
val := v.valueToInterface(t.Right, ctx)
return v.evaluateUnary(t.Operator.Type, val)
}
return nil
}
// evaluateBinary computes the result of applying binary operator op to two
// already-evaluated operands. Concatenation ('..') formats both operands
// into a string. Arithmetic is tried on integers first (so int op int stays
// integral), then on float64 after numeric promotion. Division/modulo by
// zero, a nil operand, or any unsupported operator/type pairing yields nil.
func (v *Validator) evaluateBinary(left interface{}, op parser.TokenType, right interface{}) interface{} {
	if left == nil || right == nil {
		return nil
	}
	if op == parser.TokenConcat {
		// Concatenation accepts any value via default formatting.
		return fmt.Sprintf("%v%v", left, right)
	}

	asInt := func(x interface{}) (int64, bool) {
		switch n := x.(type) {
		case int64:
			return n, true
		case int:
			return int64(n), true
		default:
			return 0, false
		}
	}
	asFloat := func(x interface{}) (float64, bool) {
		switch n := x.(type) {
		case float64:
			return n, true
		case int64:
			return float64(n), true
		case int:
			return float64(n), true
		default:
			return 0, false
		}
	}

	// Integer arithmetic path: only taken when both sides are integral.
	if l, lok := asInt(left); lok {
		if r, rok := asInt(right); rok {
			switch op {
			case parser.TokenPlus:
				return l + r
			case parser.TokenMinus:
				return l - r
			case parser.TokenStar:
				return l * r
			case parser.TokenSlash:
				if r != 0 {
					return l / r
				}
			case parser.TokenPercent:
				if r != 0 {
					return l % r
				}
			}
		}
	}

	// Float path: handles mixed int/float operands via promotion.
	// (Modulo is intentionally integer-only.)
	if l, lok := asFloat(left); lok {
		if r, rok := asFloat(right); rok {
			switch op {
			case parser.TokenPlus:
				return l + r
			case parser.TokenMinus:
				return l - r
			case parser.TokenStar:
				return l * r
			case parser.TokenSlash:
				if r != 0 {
					return l / r
				}
			}
		}
	}
	return nil
}
func (v *Validator) evaluateUnary(op parser.TokenType, val interface{}) interface{} {
if val == nil {
return nil
}
switch op {
case parser.TokenMinus:
switch v := val.(type) {
case int64:
return -v
case float64:
return -v
}
case parser.TokenSymbol: // ! is Symbol?
// Parser uses TokenSymbol for ! ?
// Lexer: '!' -> Symbol.
if b, ok := val.(bool); ok {
return !b
}
} }
return nil return nil
} }
@@ -304,7 +407,7 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
return // Ignore implicit signals or missing datasource (handled elsewhere if mandatory) return // Ignore implicit signals or missing datasource (handled elsewhere if mandatory)
} }
dsNode := v.resolveReference(dsName, v.getNodeFile(signalNode), isDataSource) dsNode := v.resolveReference(dsName, signalNode, isDataSource)
if dsNode == nil { if dsNode == nil {
v.Diagnostics = append(v.Diagnostics, Diagnostic{ v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError, Level: LevelError,
@@ -442,11 +545,52 @@ func (v *Validator) validateGAMSignal(gamNode, signalNode *index.ProjectNode, di
} }
} }
} }
// Validate Value initialization
if valField, hasValue := fields["Value"]; hasValue && len(valField) > 0 {
var typeStr string
if typeFields, ok := fields["Type"]; ok && len(typeFields) > 0 {
typeStr = v.getFieldValue(typeFields[0], signalNode)
} else if signalNode.Target != nil {
if t, ok := signalNode.Target.Metadata["Type"]; ok {
typeStr = t
}
}
if typeStr != "" && v.Schema != nil {
ctx := v.Schema.Context
typeVal := ctx.CompileString(typeStr)
if typeVal.Err() == nil {
valInterface := v.valueToInterface(valField[0].Value, signalNode)
valVal := ctx.Encode(valInterface)
res := typeVal.Unify(valVal)
if err := res.Validate(cue.Concrete(true)); err != nil {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Value initialization mismatch for signal '%s': %v", signalNode.RealName, err),
Position: valField[0].Position,
File: v.getNodeFile(signalNode),
})
}
}
}
}
}
func (v *Validator) getEvaluatedMetadata(node *index.ProjectNode, key string) string {
for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok && f.Name == key {
return v.getFieldValue(f, node)
}
}
}
return node.Metadata[key]
} }
func (v *Validator) checkSignalProperty(gamSig, dsSig *index.ProjectNode, prop string) { func (v *Validator) checkSignalProperty(gamSig, dsSig *index.ProjectNode, prop string) {
gamVal := gamSig.Metadata[prop] gamVal := v.getEvaluatedMetadata(gamSig, prop)
dsVal := dsSig.Metadata[prop] dsVal := v.getEvaluatedMetadata(dsSig, prop)
if gamVal == "" { if gamVal == "" {
return return
@@ -513,39 +657,15 @@ func (v *Validator) getFields(node *index.ProjectNode) map[string][]*parser.Fiel
} }
func (v *Validator) getFieldValue(f *parser.Field, ctx *index.ProjectNode) string { func (v *Validator) getFieldValue(f *parser.Field, ctx *index.ProjectNode) string {
switch val := f.Value.(type) { res := v.valueToInterface(f.Value, ctx)
case *parser.StringValue: if res == nil {
return val.Value return ""
case *parser.ReferenceValue:
return val.Value
case *parser.IntValue:
return val.Raw
case *parser.FloatValue:
return val.Raw
case *parser.BoolValue:
return strconv.FormatBool(val.Value)
case *parser.VariableReferenceValue:
name := strings.TrimPrefix(val.Name, "@")
if info := v.Tree.ResolveVariable(ctx, name); info != nil {
if info.Def.DefaultValue != nil {
return v.getFieldValue(&parser.Field{Value: info.Def.DefaultValue}, ctx)
}
}
} }
return "" return fmt.Sprintf("%v", res)
} }
func (v *Validator) resolveReference(name string, file string, predicate func(*index.ProjectNode) bool) *index.ProjectNode { func (v *Validator) resolveReference(name string, ctx *index.ProjectNode, predicate func(*index.ProjectNode) bool) *index.ProjectNode {
if isoNode, ok := v.Tree.IsolatedFiles[file]; ok { return v.Tree.ResolveName(ctx, name, predicate)
if found := v.Tree.FindNode(isoNode, name, predicate); found != nil {
return found
}
return nil
}
if v.Tree.Root == nil {
return nil
}
return v.Tree.FindNode(v.Tree.Root, name, predicate)
} }
func (v *Validator) getNodeClass(node *index.ProjectNode) string { func (v *Validator) getNodeClass(node *index.ProjectNode) string {
@@ -710,7 +830,7 @@ func (v *Validator) checkFunctionsArray(node *index.ProjectNode, fields map[stri
if arr, ok := f.Value.(*parser.ArrayValue); ok { if arr, ok := f.Value.(*parser.ArrayValue); ok {
for _, elem := range arr.Elements { for _, elem := range arr.Elements {
if ref, ok := elem.(*parser.ReferenceValue); ok { if ref, ok := elem.(*parser.ReferenceValue); ok {
target := v.resolveReference(ref.Value, v.getNodeFile(node), isGAM) target := v.resolveReference(ref.Value, node, isGAM)
if target == nil { if target == nil {
v.Diagnostics = append(v.Diagnostics, Diagnostic{ v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError, Level: LevelError,
@@ -769,19 +889,20 @@ func (v *Validator) CheckDataSourceThreading() {
return return
} }
// 1. Find RealTimeApplication var appNodes []*index.ProjectNode
var appNode *index.ProjectNode
findApp := func(n *index.ProjectNode) { findApp := func(n *index.ProjectNode) {
if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" { if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" {
appNode = n appNodes = append(appNodes, n)
} }
} }
v.Tree.Walk(findApp) v.Tree.Walk(findApp)
if appNode == nil { for _, appNode := range appNodes {
return v.checkAppDataSourceThreading(appNode)
} }
}
func (v *Validator) checkAppDataSourceThreading(appNode *index.ProjectNode) {
// 2. Find States // 2. Find States
var statesNode *index.ProjectNode var statesNode *index.ProjectNode
if s, ok := appNode.Children["States"]; ok { if s, ok := appNode.Children["States"]; ok {
@@ -852,7 +973,7 @@ func (v *Validator) getThreadGAMs(thread *index.ProjectNode) []*index.ProjectNod
if arr, ok := f.Value.(*parser.ArrayValue); ok { if arr, ok := f.Value.(*parser.ArrayValue); ok {
for _, elem := range arr.Elements { for _, elem := range arr.Elements {
if ref, ok := elem.(*parser.ReferenceValue); ok { if ref, ok := elem.(*parser.ReferenceValue); ok {
target := v.resolveReference(ref.Value, v.getNodeFile(thread), isGAM) target := v.resolveReference(ref.Value, thread, isGAM)
if target != nil { if target != nil {
gams = append(gams, target) gams = append(gams, target)
} }
@@ -874,7 +995,7 @@ func (v *Validator) getGAMDataSources(gam *index.ProjectNode) []*index.ProjectNo
fields := v.getFields(sig) fields := v.getFields(sig)
if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 { if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
dsName := v.getFieldValue(dsFields[0], sig) dsName := v.getFieldValue(dsFields[0], sig)
dsNode := v.resolveReference(dsName, v.getNodeFile(sig), isDataSource) dsNode := v.resolveReference(dsName, sig, isDataSource)
if dsNode != nil { if dsNode != nil {
dsMap[dsNode] = true dsMap[dsNode] = true
} }
@@ -908,18 +1029,20 @@ func (v *Validator) CheckINOUTOrdering() {
return return
} }
var appNode *index.ProjectNode var appNodes []*index.ProjectNode
findApp := func(n *index.ProjectNode) { findApp := func(n *index.ProjectNode) {
if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" { if cls, ok := n.Metadata["Class"]; ok && cls == "RealTimeApplication" {
appNode = n appNodes = append(appNodes, n)
} }
} }
v.Tree.Walk(findApp) v.Tree.Walk(findApp)
if appNode == nil { for _, appNode := range appNodes {
return v.checkAppINOUTOrdering(appNode)
} }
}
func (v *Validator) checkAppINOUTOrdering(appNode *index.ProjectNode) {
var statesNode *index.ProjectNode var statesNode *index.ProjectNode
if s, ok := appNode.Children["States"]; ok { if s, ok := appNode.Children["States"]; ok {
statesNode = s statesNode = s
@@ -936,6 +1059,7 @@ func (v *Validator) CheckINOUTOrdering() {
return return
} }
suppress := v.isGloballyAllowed("not_consumed", v.getNodeFile(appNode))
for _, state := range statesNode.Children { for _, state := range statesNode.Children {
var threads []*index.ProjectNode var threads []*index.ProjectNode
for _, child := range state.Children { for _, child := range state.Children {
@@ -961,24 +1085,34 @@ func (v *Validator) CheckINOUTOrdering() {
v.processGAMSignalsForOrdering(gam, "InputSignals", producedSignals, consumedSignals, true, thread, state) v.processGAMSignalsForOrdering(gam, "InputSignals", producedSignals, consumedSignals, true, thread, state)
v.processGAMSignalsForOrdering(gam, "OutputSignals", producedSignals, consumedSignals, false, thread, state) v.processGAMSignalsForOrdering(gam, "OutputSignals", producedSignals, consumedSignals, false, thread, state)
} }
if !suppress {
// Check for produced but not consumed // Check for produced but not consumed
for ds, signals := range producedSignals { for ds, signals := range producedSignals {
for sigName, producers := range signals { for sigName, producers := range signals {
consumed := false consumed := false
if cSet, ok := consumedSignals[ds]; ok { if cSet, ok := consumedSignals[ds]; ok {
if cSet[sigName] { if cSet[sigName] {
consumed = true consumed = true
}
} }
} if !consumed {
if !consumed { for _, prod := range producers {
for _, prod := range producers { locally_suppressed := false
v.Diagnostics = append(v.Diagnostics, Diagnostic{ for _, p := range prod.Pragmas {
Level: LevelWarning, if strings.HasPrefix(p, "not_consumed:") || strings.HasPrefix(p, "ignore(not_consumed)") {
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is produced in thread '%s' but never consumed in the same thread.", sigName, ds.RealName, thread.RealName), locally_suppressed = true
Position: v.getNodePosition(prod), break
File: v.getNodeFile(prod), }
}) }
if !locally_suppressed {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelWarning,
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is produced in thread '%s' but never consumed in the same thread.", sigName, ds.RealName, thread.RealName),
Position: v.getNodePosition(prod),
File: v.getNodeFile(prod),
})
}
}
} }
} }
} }
@@ -992,7 +1126,7 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
if container == nil { if container == nil {
return return
} }
not_produced_suppress := v.isGloballyAllowed("not_produced", v.getNodeFile(gam))
for _, sig := range container.Children { for _, sig := range container.Children {
fields := v.getFields(sig) fields := v.getFields(sig)
var dsNode *index.ProjectNode var dsNode *index.ProjectNode
@@ -1008,7 +1142,7 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
if dsNode == nil { if dsNode == nil {
if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 { if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
dsName := v.getFieldValue(dsFields[0], sig) dsName := v.getFieldValue(dsFields[0], sig)
dsNode = v.resolveReference(dsName, v.getNodeFile(sig), isDataSource) dsNode = v.resolveReference(dsName, sig, isDataSource)
} }
if aliasFields, ok := fields["Alias"]; ok && len(aliasFields) > 0 { if aliasFields, ok := fields["Alias"]; ok && len(aliasFields) > 0 {
sigName = v.getFieldValue(aliasFields[0], sig) sigName = v.getFieldValue(aliasFields[0], sig)
@@ -1033,22 +1167,39 @@ func (v *Validator) processGAMSignalsForOrdering(gam *index.ProjectNode, contain
} }
if isInput { if isInput {
isProduced := false // Check if signal has 'Value' field - treat as produced/initialized
if set, ok := produced[dsNode]; ok { if _, hasValue := fields["Value"]; hasValue {
if len(set[sigName]) > 0 { if produced[dsNode] == nil {
isProduced = true produced[dsNode] = make(map[string][]*index.ProjectNode)
} }
produced[dsNode][sigName] = append(produced[dsNode][sigName], sig)
} }
if !isProduced { if !not_produced_suppress {
v.Diagnostics = append(v.Diagnostics, Diagnostic{ isProduced := false
Level: LevelError, if set, ok := produced[dsNode]; ok {
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is consumed by GAM '%s' in thread '%s' (State '%s') before being produced by any previous GAM.", sigName, dsNode.RealName, gam.RealName, thread.RealName, state.RealName), if len(set[sigName]) > 0 {
Position: v.getNodePosition(sig), isProduced = true
File: v.getNodeFile(sig), }
}) }
} locally_suppressed := false
for _, p := range sig.Pragmas {
if strings.HasPrefix(p, "not_produced:") || strings.HasPrefix(p, "ignore(not_produced)") {
locally_suppressed = true
break
}
}
if !isProduced && !locally_suppressed {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("INOUT Signal '%s' (DS '%s') is consumed by GAM '%s' in thread '%s' (State '%s') before being produced by any previous GAM.", sigName, dsNode.RealName, gam.RealName, thread.RealName, state.RealName),
Position: v.getNodePosition(sig),
File: v.getNodeFile(sig),
})
}
}
if consumed[dsNode] == nil { if consumed[dsNode] == nil {
consumed[dsNode] = make(map[string]bool) consumed[dsNode] = make(map[string]bool)
} }
@@ -1079,6 +1230,93 @@ func (v *Validator) getDataSourceDirection(ds *index.ProjectNode) string {
return "" return ""
} }
// CheckSignalConsistency verifies that a signal referenced from multiple
// GAMs against the same DataSource is declared with a consistent Type
// everywhere. The first usage carrying an explicit Type becomes the
// reference; every later typed usage that disagrees produces an error
// diagnostic. Untyped usages are ignored (they inherit their type).
func (v *Validator) CheckSignalConsistency() {
	// usagesByDS: DataSource node -> normalized signal name -> usages.
	usagesByDS := make(map[*index.ProjectNode]map[string][]*index.ProjectNode)

	// Collect every input/output signal usage from all GAMs.
	v.Tree.Walk(func(node *index.ProjectNode) {
		if !isGAM(node) {
			return
		}
		for _, dir := range []string{"InputSignals", "OutputSignals"} {
			container, ok := node.Children[dir]
			if !ok {
				continue
			}
			for _, sig := range container.Children {
				fields := v.getFields(sig)

				// Resolve the DataSource this signal belongs to.
				var dsNode *index.ProjectNode
				if dsFields, ok := fields["DataSource"]; ok && len(dsFields) > 0 {
					if dsName := v.getFieldValue(dsFields[0], sig); dsName != "" {
						dsNode = v.resolveReference(dsName, sig, isDataSource)
					}
				}

				// The effective signal name is the Alias when present,
				// otherwise the node's own name.
				name := sig.RealName
				if aliasFields, ok := fields["Alias"]; ok && len(aliasFields) > 0 {
					name = v.getFieldValue(aliasFields[0], sig)
				}

				if dsNode == nil || name == "" {
					continue
				}
				name = index.NormalizeName(name)
				if usagesByDS[dsNode] == nil {
					usagesByDS[dsNode] = make(map[string][]*index.ProjectNode)
				}
				usagesByDS[dsNode][name] = append(usagesByDS[dsNode][name], sig)
			}
		}
	})

	// Compare the declared Type of each multi-usage signal.
	for ds, byName := range usagesByDS {
		for sigName, usages := range byName {
			if len(usages) <= 1 {
				continue
			}
			var refType string
			var refNode *index.ProjectNode
			for _, u := range usages {
				declared := ""
				fields := v.getFields(u)
				if typeFields, ok := fields["Type"]; ok && len(typeFields) > 0 {
					declared = v.getFieldValue(typeFields[0], u)
				}
				if declared == "" {
					continue
				}
				if refNode == nil {
					refType, refNode = declared, u
					continue
				}
				if declared != refType {
					v.Diagnostics = append(v.Diagnostics, Diagnostic{
						Level:    LevelError,
						Message:  fmt.Sprintf("Signal Type Mismatch: Signal '%s' (in DS '%s') is defined as '%s' in '%s' but as '%s' in '%s'", sigName, ds.RealName, refType, refNode.Parent.Parent.RealName, declared, u.Parent.Parent.RealName),
						Position: v.getNodePosition(u),
						File:     v.getNodeFile(u),
					})
				}
			}
		}
	}
}
func (v *Validator) CheckVariables() { func (v *Validator) CheckVariables() {
if v.Schema == nil { if v.Schema == nil {
return return
@@ -1086,34 +1324,57 @@ func (v *Validator) CheckVariables() {
ctx := v.Schema.Context ctx := v.Schema.Context
checkNodeVars := func(node *index.ProjectNode) { checkNodeVars := func(node *index.ProjectNode) {
for _, info := range node.Variables { seen := make(map[string]parser.Position)
def := info.Def for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if vdef, ok := def.(*parser.VariableDefinition); ok {
if prevPos, exists := seen[vdef.Name]; exists {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Duplicate variable definition: '%s' was already defined at %d:%d", vdef.Name, prevPos.Line, prevPos.Column),
Position: vdef.Position,
File: frag.File,
})
}
seen[vdef.Name] = vdef.Position
// Compile Type if vdef.IsConst && vdef.DefaultValue == nil {
typeVal := ctx.CompileString(def.TypeExpr) v.Diagnostics = append(v.Diagnostics, Diagnostic{
if typeVal.Err() != nil { Level: LevelError,
v.Diagnostics = append(v.Diagnostics, Diagnostic{ Message: fmt.Sprintf("Constant variable '%s' must have an initial value", vdef.Name),
Level: LevelError, Position: vdef.Position,
Message: fmt.Sprintf("Invalid type expression for variable '%s': %v", def.Name, typeVal.Err()), File: frag.File,
Position: def.Position, })
File: info.File, continue
}) }
continue
}
if def.DefaultValue != nil { // Compile Type
valInterface := v.valueToInterface(def.DefaultValue, node) typeVal := ctx.CompileString(vdef.TypeExpr)
valVal := ctx.Encode(valInterface) if typeVal.Err() != nil {
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Invalid type expression for variable '%s': %v", vdef.Name, typeVal.Err()),
Position: vdef.Position,
File: frag.File,
})
continue
}
// Unify if vdef.DefaultValue != nil {
res := typeVal.Unify(valVal) valInterface := v.valueToInterface(vdef.DefaultValue, node)
if err := res.Validate(cue.Concrete(true)); err != nil { valVal := ctx.Encode(valInterface)
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError, // Unify
Message: fmt.Sprintf("Variable '%s' value mismatch: %v", def.Name, err), res := typeVal.Unify(valVal)
Position: def.Position, if err := res.Validate(cue.Concrete(true)); err != nil {
File: info.File, v.Diagnostics = append(v.Diagnostics, Diagnostic{
}) Level: LevelError,
Message: fmt.Sprintf("Variable '%s' value mismatch: %v", vdef.Name, err),
Position: vdef.Position,
File: frag.File,
})
}
}
} }
} }
} }
@@ -1121,15 +1382,15 @@ func (v *Validator) CheckVariables() {
v.Tree.Walk(checkNodeVars) v.Tree.Walk(checkNodeVars)
} }
func (v *Validator) CheckUnresolvedVariables() { func (v *Validator) CheckUnresolvedVariables() {
for _, ref := range v.Tree.References { for _, ref := range v.Tree.References {
if ref.IsVariable && ref.TargetVariable == nil { if ref.IsVariable && ref.TargetVariable == nil {
v.Diagnostics = append(v.Diagnostics, Diagnostic{ v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError, Level: LevelError,
Message: fmt.Sprintf("Unresolved variable reference: '@%s'", ref.Name), Message: fmt.Sprintf("Unresolved variable reference: '@%s'", ref.Name),
Position: ref.Position, Position: ref.Position,
File: ref.File, File: ref.File,
}) })
} }
} }
} }

View File

@@ -42,6 +42,10 @@ The LSP server should provide the following capabilities:
- **Rename Symbol**: Rename an object, field, or reference across the entire project scope. - **Rename Symbol**: Rename an object, field, or reference across the entire project scope.
- Supports renaming of Definitions (`+Name` or `Name`), preserving any modifiers (`+`/`$`). - Supports renaming of Definitions (`+Name` or `Name`), preserving any modifiers (`+`/`$`).
- Updates all references to the renamed symbol, including qualified references (e.g., `Pkg.Name`). - Updates all references to the renamed symbol, including qualified references (e.g., `Pkg.Name`).
- **Inlay Hints**: Provide real-time contextual information inline.
- **Signal Metadata**: Displays `::TYPE[ELEMENTSxDIMENSIONS]` next to signal names.
- **Object Class**: Displays `CLASS::` before object references.
- **Evaluation**: Displays results of expressions (` => RESULT`) and variable references (`(=> VALUE)`).
- **Code Snippets**: Provide snippets for common patterns (e.g., `+Object = { ... }`). - **Code Snippets**: Provide snippets for common patterns (e.g., `+Object = { ... }`).
- **Formatting**: Format the document using the same rules and engine as the `fmt` command. - **Formatting**: Format the document using the same rules and engine as the `fmt` command.
@@ -71,22 +75,31 @@ The LSP server should provide the following capabilities:
### Grammar ### Grammar
- `comment` : `//.*` - `comment` : `//.*`
- `configuration`: `definition+` - `configuration`: `(definition | macro)+`
- `definition`: `field = value | node = subnode` - `definition`: `field = value | node = subnode`
- `macro`: `package | variable | constant`
- `field`: `[a-zA-Z][a-zA-Z0-9_\-]*` - `field`: `[a-zA-Z][a-zA-Z0-9_\-]*`
- `node`: `[+$][a-zA-Z][a-zA-Z0-9_\-]*` - `node`: `[+$][a-zA-Z][a-zA-Z0-9_\-]*`
- `subnode`: `{ definition+ }` - `subnode`: `{ (definition | macro)+ }`
- `value`: `string|int|float|bool|reference|array` - `value`: `expression`
- `expression`: `atom | binary_expr | unary_expr`
- `atom`: `string | int | float | bool | reference | array | "(" expression ")"`
- `binary_expr`: `expression operator expression`
- `unary_expr`: `unary_operator expression`
- `operator`: `+ | - | * | / | % | & | | | ^ | ..`
- `unary_operator`: `- | !`
- `int`: `/-?[0-9]+|0b[01]+|0x[0-9a-fA-F]+` - `int`: `/-?[0-9]+|0b[01]+|0x[0-9a-fA-F]+`
- `float`: `-?[0-9]+\.[0-9]+|-?[0-9]+\.?[0-9]*e\-?[0-9]+` - `float`: `-?[0-9]+\.[0-9]+|-?[0-9]+\.?[0-9]*[eE][+-]?[0-9]+`
- `bool`: `true|false` - `bool`: `true|false`
- `string`: `".*"` - `string`: `".*"`
- `reference` : `string|.*` - `reference` : `[a-zA-Z][a-zA-Z0-9_\-\.]* | @[a-zA-Z0-9_]+ | $[a-zA-Z0-9_]+`
- `array`: `{ value }` - `array`: `{ (value | ",")* }`
#### Extended grammar #### Extended grammar
- `package` : `#package URI` - `package` : `#package URI`
- `variable`: `#var NAME: TYPE [= expression]`
- `constant`: `#let NAME: TYPE = expression`
- `URI`: `PROJECT | PROJECT.PRJ_SUB_URI` - `URI`: `PROJECT | PROJECT.PRJ_SUB_URI`
- `PRJ_SUB_URI`: `NODE | NODE.PRJ_SUB_URI` - `PRJ_SUB_URI`: `NODE | NODE.PRJ_SUB_URI`
- `docstring` : `//#.*` - `docstring` : `//#.*`
@@ -97,13 +110,17 @@ The LSP server should provide the following capabilities:
- **Nodes (`+` / `$`)**: The prefixes `+` and `$` indicate that the node represents an object. - **Nodes (`+` / `$`)**: The prefixes `+` and `$` indicate that the node represents an object.
- **Constraint**: These nodes _must_ contain a field named `Class` within their subnode definition (across all files where the node is defined). - **Constraint**: These nodes _must_ contain a field named `Class` within their subnode definition (across all files where the node is defined).
- **Signals**: Signals are considered nodes but **not** objects. They do not require a `Class` field. - **Signals**: Signals are considered nodes but **not** objects. They do not require a `Class` field.
- **Variables (`#var`)**: Define overrideable parameters. Can be overridden via CLI (`-vVAR=VAL`).
- **Constants (`#let`)**: Define fixed parameters. **Cannot** be overridden externally. Must have an initial value.
- **Expressions**: Evaluated during build and displayed evaluated in LSP hover documentation.
- **Docstrings (`//#`)**: Associated with the following definition (Node, Field, Variable, or Constant).
- **Pragmas (`//!`)**: Used to suppress specific diagnostics. The developer can use these to explain why a rule is being ignored. Supported pragmas: - **Pragmas (`//!`)**: Used to suppress specific diagnostics. The developer can use these to explain why a rule is being ignored. Supported pragmas:
- `//!unused: REASON` or `//!ignore(unused): REASON` - Suppress "Unused GAM" or "Unused Signal" warnings. - `//!unused: REASON` or `//!ignore(unused): REASON` - Suppress "Unused GAM" or "Unused Signal" warnings.
- `//!implicit: REASON` or `//!ignore(implicit): REASON` - Suppress "Implicitly Defined Signal" warnings. - `//!implicit: REASON` or `//!ignore(implicit): REASON` - Suppress "Implicitly Defined Signal" warnings.
- `//!allow(WARNING_TYPE): REASON` or `//!ignore(WARNING_TYPE): REASON` - Global suppression for a specific warning type across the whole project (supported: `unused`, `implicit`). - `//!allow(WARNING_TYPE): REASON` or `//!ignore(WARNING_TYPE): REASON` - Global suppression for a specific warning type across the whole project (supported: `unused`, `implicit`, `not_consumed`, `not_produced`).
- `//!cast(DEF_TYPE, CUR_TYPE): REASON` - Suppress "Type Inconsistency" errors if types match. - `//!cast(DEF_TYPE, CUR_TYPE): REASON` - Suppress "Type Inconsistency" errors if types match.
- **Structure**: A configuration is composed by one or more definitions. - **Structure**: A configuration is composed by one or more definitions or macros.
- **Strictness**: Any content that is not a valid comment (or pragma/docstring) or a valid definition (Field, Node, or Object) is **not allowed** and must generate a parsing error. - **Strictness**: Any content that is not a valid comment (or pragma/docstring) or a valid definition/macro is **not allowed** and must generate a parsing error.
### Core MARTe Classes ### Core MARTe Classes
@@ -124,6 +141,7 @@ MARTe configurations typically involve several main categories of objects:
- All signal definitions **must** include a `Type` field with a valid value. - All signal definitions **must** include a `Type` field with a valid value.
- **Size Information**: Signals can optionally include `NumberOfDimensions` and `NumberOfElements` fields. If not explicitly defined, these default to `1`. - **Size Information**: Signals can optionally include `NumberOfDimensions` and `NumberOfElements` fields. If not explicitly defined, these default to `1`.
- **Property Matching**: Signal references in GAMs must match the properties (`Type`, `NumberOfElements`, `NumberOfDimensions`) of the defined signal in the `DataSource`. - **Property Matching**: Signal references in GAMs must match the properties (`Type`, `NumberOfElements`, `NumberOfDimensions`) of the defined signal in the `DataSource`.
- **Consistency**: Implicit signals used across different GAMs must share the same `Type` and size properties.
- **Extensibility**: Signal definitions can include additional fields as required by the specific application context. - **Extensibility**: Signal definitions can include additional fields as required by the specific application context.
- **Signal Reference Syntax**: - **Signal Reference Syntax**:
- Signals are referenced or defined in `InputSignals` or `OutputSignals` sub-nodes using one of the following formats: - Signals are referenced or defined in `InputSignals` or `OutputSignals` sub-nodes using one of the following formats:
@@ -145,6 +163,7 @@ MARTe configurations typically involve several main categories of objects:
``` ```
In this case, `Alias` points to the DataSource signal name. In this case, `Alias` points to the DataSource signal name.
- **Implicit Definition Constraint**: If a signal is implicitly defined within a GAM, the `Type` field **must** be present in the reference block to define the signal's properties. - **Implicit Definition Constraint**: If a signal is implicitly defined within a GAM, the `Type` field **must** be present in the reference block to define the signal's properties.
- **Renaming**: Renaming a signal (explicit or implicit) via LSP updates all its usages across all GAMs and DataSources in the project. Local aliases (`Alias = Name`) are preserved while their targets are updated.
- **Directionality**: DataSources and their signals are directional: - **Directionality**: DataSources and their signals are directional:
- `Input` (IN): Only providing data. Signals can only be used in `InputSignals`. - `Input` (IN): Only providing data. Signals can only be used in `InputSignals`.
- `Output` (OUT): Only receiving data. Signals can only be used in `OutputSignals`. - `Output` (OUT): Only receiving data. Signals can only be used in `OutputSignals`.
@@ -155,9 +174,11 @@ MARTe configurations typically involve several main categories of objects:
The tool must build an index of the configuration to support LSP features and validations: The tool must build an index of the configuration to support LSP features and validations:
- **Recursive Indexing**: All `.marte` files in the project root and subdirectories are indexed automatically.
- **GAMs**: Referenced in `$APPLICATION.States.$STATE_NAME.Threads.$THREAD_NAME.Functions` (where `$APPLICATION` is a `RealTimeApplication` node). - **GAMs**: Referenced in `$APPLICATION.States.$STATE_NAME.Threads.$THREAD_NAME.Functions` (where `$APPLICATION` is a `RealTimeApplication` node).
- **Signals**: Referenced within the `InputSignals` and `OutputSignals` sub-nodes of a GAM. - **Signals**: Referenced within the `InputSignals` and `OutputSignals` sub-nodes of a GAM.
- **DataSources**: Referenced within the `DataSource` field of a signal reference/definition. - **DataSources**: Referenced within the `DataSource` field of a signal reference/definition.
- **Variables/Constants**: Referenced via `@NAME` or `$NAME` in expressions.
- **General References**: Objects can also be referenced in other fields (e.g., as targets for messages). - **General References**: Objects can also be referenced in other fields (e.g., as targets for messages).
### Validation Rules ### Validation Rules

View File

@@ -0,0 +1,78 @@
package integration
import (
	"bytes"
	"strings"
	"testing"

	"github.com/marte-community/marte-dev-tools/internal/formatter"
	"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestAdvancedNumbers verifies that hexadecimal, binary, decimal and
// scientific number literals parse to the expected integer values and that
// the formatter preserves the original literal notation (0x.., 0b..).
func TestAdvancedNumbers(t *testing.T) {
	content := `
Hex = 0xFF
HexLower = 0xee
Binary = 0b1011
Decimal = 123
Scientific = 1e-3
`
	p := parser.NewParser(content)
	cfg, err := p.Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	// Verify values
	foundHex := false
	foundHexLower := false
	foundBinary := false
	for _, def := range cfg.Definitions {
		f, ok := def.(*parser.Field)
		if !ok {
			continue
		}
		switch f.Name {
		case "Hex":
			if v, ok := f.Value.(*parser.IntValue); ok {
				if v.Value != 255 {
					t.Errorf("Expected 255 for Hex, got %d", v.Value)
				}
				foundHex = true
			} else {
				// Report the actual type so literal-parsing regressions show up
				// as a type error instead of a misleading "field not found".
				t.Errorf("Hex was parsed as %T, expected *parser.IntValue", f.Value)
			}
		case "HexLower":
			if v, ok := f.Value.(*parser.IntValue); ok {
				if v.Value != 238 {
					t.Errorf("Expected 238 for HexLower, got %d", v.Value)
				}
				foundHexLower = true
			} else {
				t.Errorf("HexLower was parsed as %T, expected *parser.IntValue", f.Value)
			}
		case "Binary":
			if v, ok := f.Value.(*parser.IntValue); ok {
				// Previously a wrong value was reported only as "field not
				// found"; report the mismatch explicitly instead.
				if v.Value != 11 {
					t.Errorf("Expected 11 for Binary, got %d", v.Value)
				}
				foundBinary = true
			} else {
				t.Errorf("Binary was parsed as %T, expected *parser.IntValue", f.Value)
			}
		}
	}
	if !foundHex {
		t.Error("Hex field not found")
	}
	if !foundHexLower {
		t.Error("HexLower field not found")
	}
	if !foundBinary {
		t.Error("Binary field not found")
	}
	// Verify formatting: the original notation must survive a round trip.
	var buf bytes.Buffer
	formatter.Format(cfg, &buf)
	formatted := buf.String()
	if !contains(formatted, "Hex = 0xFF") {
		t.Errorf("Formatted content missing Hex = 0xFF:\n%s", formatted)
	}
	if !contains(formatted, "HexLower = 0xee") {
		t.Errorf("Formatted content missing HexLower = 0xee:\n%s", formatted)
	}
	if !contains(formatted, "Binary = 0b1011") {
		t.Errorf("Formatted content missing Binary = 0b1011:\n%s", formatted)
	}
}
// contains reports whether substr is within s. It uses strings.Contains
// directly instead of round-tripping both arguments through []byte.
func contains(s, substr string) bool {
	return strings.Contains(s, substr)
}

View File

@@ -0,0 +1,56 @@
package integration
import (
"os"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/builder"
)
// TestBuilderMergeNodes checks that two package-less files defining the
// SAME root node (+App) are merged by the Builder into one node, including
// the recursive merge of the shared +Sub child.
func TestBuilderMergeNodes(t *testing.T) {
	content1 := `
+App = {
Field1 = 10
+Sub = { Val = 1 }
}
`
	content2 := `
+App = {
Field2 = 20
+Sub = { Val2 = 2 }
}
`
	// writeTemp persists content to a temp file, registers its cleanup and
	// fails the test on any I/O error (previously all errors were ignored).
	writeTemp := func(pattern, content string) string {
		f, err := os.CreateTemp("", pattern)
		if err != nil {
			t.Fatalf("CreateTemp failed: %v", err)
		}
		t.Cleanup(func() { os.Remove(f.Name()) })
		if _, err := f.WriteString(content); err != nil {
			f.Close()
			t.Fatalf("WriteString failed: %v", err)
		}
		if err := f.Close(); err != nil {
			t.Fatalf("Close failed: %v", err)
		}
		return f.Name()
	}
	// The "*" keeps the .marte extension in the generated file name.
	file1 := writeTemp("merge1-*.marte", content1)
	file2 := writeTemp("merge2-*.marte", content2)

	b := builder.NewBuilder([]string{file1, file2}, nil)
	outF, err := os.CreateTemp("", "out_merge-*.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	t.Cleanup(func() { os.Remove(outF.Name()) })
	if err := b.Build(outF); err != nil {
		t.Fatalf("Build failed: %v", err)
	}
	outF.Close()

	outContent, err := os.ReadFile(outF.Name())
	if err != nil {
		t.Fatalf("ReadFile failed: %v", err)
	}
	outStr := string(outContent)
	// Fields from both files plus the merged +Sub contents must be present.
	for _, want := range []string{"Field1 = 10", "Field2 = 20", "+Sub = {", "Val = 1", "Val2 = 2"} {
		if !strings.Contains(outStr, want) {
			t.Errorf("Missing %q in merged output:\n%s", want, outStr)
		}
	}
}

View File

@@ -0,0 +1,88 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestEvaluatedSignalProperties verifies that signal properties defined via
// expressions (e.g. NumberOfElements = @N) are evaluated before being
// compared against the properties declared in GAM signal references.
func TestEvaluatedSignalProperties(t *testing.T) {
	content := `
#let N: uint32 = 10
+DS = {
Class = FileReader
Filename = "test.bin"
Signals = {
Sig1 = { Type = uint32 NumberOfElements = @N }
}
}
+GAM = {
Class = IOGAM
InputSignals = {
Sig1 = { DataSource = DS Type = uint32 NumberOfElements = 10 }
}
}
`
	p := parser.NewParser(content)
	cfg, err := p.Parse()
	if err != nil {
		t.Fatal(err)
	}
	tree := index.NewProjectTree()
	tree.AddFile("test.marte", cfg)
	tree.ResolveReferences()
	v := validator.NewValidator(tree, ".")
	v.ValidateProject()
	// @N evaluates to 10, matching the GAM reference: no errors expected.
	for _, d := range v.Diagnostics {
		if d.Level == validator.LevelError {
			t.Errorf("Unexpected error: %s", d.Message)
		}
	}

	// Mismatch case: @N + 5 evaluates to 15 vs the 10 declared in the GAM.
	contentErr := `
#let N: uint32 = 10
+DS = {
Class = FileReader
Filename = "test.bin"
Signals = {
Sig1 = { Type = uint32 NumberOfElements = @N + 5 }
}
}
+GAM = {
Class = IOGAM
InputSignals = {
Sig1 = { DataSource = DS Type = uint32 NumberOfElements = 10 }
}
}
`
	p2 := parser.NewParser(contentErr)
	cfg2, err := p2.Parse()
	if err != nil {
		// Previously this error was silently discarded, which would make the
		// mismatch check below fail for the wrong reason.
		t.Fatalf("Parse of mismatch config failed: %v", err)
	}
	tree2 := index.NewProjectTree()
	tree2.AddFile("test_err.marte", cfg2)
	tree2.ResolveReferences()
	v2 := validator.NewValidator(tree2, ".")
	v2.ValidateProject()
	found := false
	for _, d := range v2.Diagnostics {
		if strings.Contains(d.Message, "property 'NumberOfElements' mismatch") {
			found = true
			// The diagnostic must show the evaluated value, not the raw
			// expression text.
			if !strings.Contains(d.Message, "defined '15'") {
				t.Errorf("Expected defined '15', got message: %s", d.Message)
			}
			break
		}
	}
	if !found {
		t.Error("Expected property mismatch error for @N + 5")
	}
}

View File

@@ -0,0 +1,60 @@
package integration
import (
"os"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/builder"
)
// TestExpressionParsing builds a configuration that uses #var arithmetic
// expressions (multiple references, brackets, operators without spaces) and
// checks the evaluated values in the built output.
func TestExpressionParsing(t *testing.T) {
	content := `
#var A: int = 10
#var B: int = 2
+Obj = {
// 1. Multiple variables
Expr1 = @A + @B + @A
// 2. Brackets
Expr2 = (@A + 2) * @B
// 3. No space operator (variable name strictness)
Expr3 = @A-2
}
`
	f, err := os.CreateTemp("", "expr_test.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(f.Name())
	if _, err := f.WriteString(content); err != nil {
		t.Fatalf("WriteString failed: %v", err)
	}
	f.Close()
	b := builder.NewBuilder([]string{f.Name()}, nil)
	outF, err := os.CreateTemp("", "out.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(outF.Name())
	if err := b.Build(outF); err != nil {
		t.Fatalf("Build failed: %v", err)
	}
	outF.Close()
	outContent, err := os.ReadFile(outF.Name())
	if err != nil {
		t.Fatalf("ReadFile failed: %v", err)
	}
	outStr := string(outContent)
	// Expr1: 10 + 2 + 10 = 22
	if !strings.Contains(outStr, "Expr1 = 22") {
		t.Errorf("Expr1 failed. Got:\n%s", outStr)
	}
	// Expr2: (10 + 2) * 2 = 24
	if !strings.Contains(outStr, "Expr2 = 24") {
		t.Errorf("Expr2 failed. Got:\n%s", outStr)
	}
	// Expr3: 10 - 2 = 8
	if !strings.Contains(outStr, "Expr3 = 8") {
		t.Errorf("Expr3 failed. Got:\n%s", outStr)
	}
}

View File

@@ -0,0 +1,39 @@
package integration
import (
"os"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/builder"
)
// TestExpressionWhitespace checks that an arithmetic expression evaluates
// identically with and without whitespace around the operator.
func TestExpressionWhitespace(t *testing.T) {
	content := `
+Obj = {
NoSpace = 2+2
WithSpace = 2 + 2
}
`
	f, err := os.CreateTemp("", "expr_ws.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(f.Name())
	if _, err := f.WriteString(content); err != nil {
		t.Fatalf("WriteString failed: %v", err)
	}
	f.Close()
	b := builder.NewBuilder([]string{f.Name()}, nil)
	outF, err := os.CreateTemp("", "out.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(outF.Name())
	// The build error was previously ignored: a failed build surfaced only
	// as confusing missing-content assertions below.
	if err := b.Build(outF); err != nil {
		t.Fatalf("Build failed: %v", err)
	}
	outF.Close()
	outContent, err := os.ReadFile(outF.Name())
	if err != nil {
		t.Fatalf("ReadFile failed: %v", err)
	}
	outStr := string(outContent)
	if !strings.Contains(outStr, "NoSpace = 4") {
		t.Errorf("NoSpace failed. Got:\n%s", outStr)
	}
	if !strings.Contains(outStr, "WithSpace = 4") {
		t.Errorf("WithSpace failed. Got:\n%s", outStr)
	}
}

View File

@@ -0,0 +1,55 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/formatter"
"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestFormatterCoverage parses a configuration exercising comments,
// docstrings, a package clause, nested nodes and several value kinds, then
// formats it back and checks that representative pieces survive the round
// trip.
func TestFormatterCoverage(t *testing.T) {
	src := `
// Head comment
#package Pkg
//# Doc for A
+A = {
Field = 10 // Trailing
Bool = true
Float = 1.23
Ref = SomeObj
Array = { 1 2 3 }
Expr = 1 + 2
// Inner
+B = {
Val = "Str"
}
}
// Final
`
	cfg, err := parser.NewParser(src).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}

	var out bytes.Buffer
	formatter.Format(cfg, &out)
	text := out.String()

	// Basic field survives formatting.
	if !strings.Contains(text, "Field = 10") {
		t.Error("Formatting failed")
	}
	// Comments and docstrings must be preserved by the formatter.
	if !strings.Contains(text, "// Head comment") {
		t.Error("Head comment missing")
	}
	if !strings.Contains(text, "//# Doc for A") {
		t.Error("Doc missing")
	}
}

38
test/isolation_test.go Normal file
View File

@@ -0,0 +1,38 @@
package integration
import (
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestIsolatedFileIsolation verifies the two-way isolation between project
// files (with a #package clause) and isolated files (without one): neither
// side may resolve names defined on the other.
func TestIsolatedFileIsolation(t *testing.T) {
	pt := index.NewProjectTree()
	// File 1: project file.
	c1, err := parser.NewParser("#package P\n+A = { Class = C }").Parse()
	if err != nil {
		t.Fatalf("Parse of f1 failed: %v", err)
	}
	pt.AddFile("f1.marte", c1)
	// File 2: isolated file (no #package clause).
	c2, err := parser.NewParser("+B = { Class = C }").Parse()
	if err != nil {
		t.Fatalf("Parse of f2 failed: %v", err)
	}
	pt.AddFile("f2.marte", c2)
	pt.ResolveReferences()
	// Looking up the project-level A from the isolated file must fail.
	isoNode := pt.IsolatedFiles["f2.marte"]
	if pt.ResolveName(isoNode, "A", nil) != nil {
		t.Error("Isolated file f2 should not see global A")
	}
	// Looking up the isolated B from the project scope must fail too.
	pNode := pt.Root.Children["P"]
	if pt.ResolveName(pNode, "B", nil) != nil {
		t.Error("Project file f1 should not see isolated B")
	}
}

125
test/let_macro_test.go Normal file
View File

@@ -0,0 +1,125 @@
package integration
import (
"os"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/builder"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestLetMacroFull exercises the #let macro end to end: indexing (with
// docstring association), builder evaluation, override protection for
// constants, mandatory-value validation and duplicate detection.
func TestLetMacroFull(t *testing.T) {
	content := `
//# My documentation
#let MyConst: uint32 = 10 + 20
+Obj = {
Value = @MyConst
}
`
	tmpFile, err := os.CreateTemp("", "let_*.marte")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(tmpFile.Name())
	if err := os.WriteFile(tmpFile.Name(), []byte(content), 0644); err != nil {
		t.Fatalf("WriteFile failed: %v", err)
	}

	// 1. Parsing & indexing: the constant must be indexed as Const and the
	// preceding //# docstring must be associated with it.
	p := parser.NewParser(content)
	cfg, err := p.Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	tree := index.NewProjectTree()
	tree.AddFile(tmpFile.Name(), cfg)
	vars := tree.Root.Variables
	if iso, ok := tree.IsolatedFiles[tmpFile.Name()]; ok {
		// A file without a #package clause is indexed in isolation.
		vars = iso.Variables
	}
	info, ok := vars["MyConst"]
	if !ok || !info.Def.IsConst {
		t.Fatal("#let variable not indexed correctly as Const")
	}
	if info.Doc != "My documentation" {
		t.Errorf("Expected doc 'My documentation', got '%s'", info.Doc)
	}

	// 2. Builder evaluation: @MyConst must evaluate to 30.
	out, err := os.CreateTemp("", "let_out.cfg")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(out.Name())
	b := builder.NewBuilder([]string{tmpFile.Name()}, nil)
	if err := b.Build(out); err != nil {
		t.Fatalf("Build failed: %v", err)
	}
	outContent, err := os.ReadFile(out.Name())
	if err != nil {
		t.Fatalf("ReadFile failed: %v", err)
	}
	if !strings.Contains(string(outContent), "Value = 30") {
		t.Errorf("Expected Value = 30 (evaluated @MyConst), got:\n%s", string(outContent))
	}

	// 3. Override protection: #let constants must ignore CLI overrides.
	out2, err := os.CreateTemp("", "let_out2.cfg")
	if err != nil {
		t.Fatalf("CreateTemp failed: %v", err)
	}
	defer os.Remove(out2.Name())
	b2 := builder.NewBuilder([]string{tmpFile.Name()}, map[string]string{"MyConst": "100"})
	if err := b2.Build(out2); err != nil {
		t.Fatalf("Build failed: %v", err)
	}
	outContent2, err := os.ReadFile(out2.Name())
	if err != nil {
		t.Fatalf("ReadFile failed: %v", err)
	}
	if !strings.Contains(string(outContent2), "Value = 30") {
		t.Errorf("Constant was overridden! Expected 30, got:\n%s", string(outContent2))
	}

	// 4. Mandatory value: "#let NAME: TYPE" without '= value' must be
	// rejected either by the parser or flagged by the validator. The
	// previous version of this section contained only dead code and never
	// asserted anything.
	contentErr := "#let BadConst: uint32"
	cfg2, err2 := parser.NewParser(contentErr).Parse()
	if err2 == nil {
		tree2 := index.NewProjectTree()
		tree2.AddFile("err.marte", cfg2)
		v := validator.NewValidator(tree2, ".")
		v.ValidateProject()
		found := false
		for _, d := range v.Diagnostics {
			if strings.Contains(d.Message, "must have an initial value") {
				found = true
				break
			}
		}
		if !found {
			t.Error("Expected 'must have an initial value' diagnostic for #let without value")
		}
	}

	// 5. Duplicate detection between #let and #var with the same name.
	contentDup := `
#let MyConst: uint32 = 10
#var MyConst: uint32 = 20
`
	cfg3, _ := parser.NewParser(contentDup).Parse()
	tree3 := index.NewProjectTree()
	tree3.AddFile("dup.marte", cfg3)
	v3 := validator.NewValidator(tree3, ".")
	v3.ValidateProject()
	foundDup := false
	for _, d := range v3.Diagnostics {
		if strings.Contains(d.Message, "Duplicate variable definition") {
			foundDup = true
			break
		}
	}
	if !foundDup {
		t.Error("Expected duplicate variable definition error")
	}
}

View File

@@ -0,0 +1,45 @@
package integration
import (
"testing"
"github.com/marte-community/marte-dev-tools/internal/parser"
)
// TestLexerCoverage drives the lexer over comment, number and identifier
// inputs purely for coverage; it only checks that tokenization terminates.
func TestLexerCoverage(t *testing.T) {
	// drain tokenizes input until EOF. The lexer must always terminate,
	// even on malformed input such as an unclosed block comment.
	drain := func(input string) {
		l := parser.NewLexer(input)
		for {
			if tok := l.NextToken(); tok.Type == parser.TokenEOF {
				return
			}
		}
	}

	// 1. Comments: line, block, docstring, pragma and unclosed block.
	drain(`
// Line comment
/* Block comment */
//# Docstring
//! Pragma
/* Unclosed block
`)
	// 2. Numbers: integer, float, scientific notation, hexadecimal.
	drain(`123 12.34 1.2e3 1.2E-3 0xFF`)
	// 3. Identifiers, including hyphenated and underscore-prefixed names.
	drain(`Valid ID with-hyphen _under`)
}

View File

@@ -10,6 +10,9 @@ import (
) )
func TestLoggerPrint(t *testing.T) { func TestLoggerPrint(t *testing.T) {
// Direct call for coverage
logger.Println("Coverage check")
if os.Getenv("TEST_LOGGER_PRINT") == "1" { if os.Getenv("TEST_LOGGER_PRINT") == "1" {
logger.Printf("Test Printf %d", 123) logger.Printf("Test Printf %d", 123)
logger.Println("Test Println") logger.Println("Test Println")

View File

@@ -79,11 +79,6 @@ func TestLSPAppTestRepro(t *testing.T) {
t.Error("LSP missing unresolved variable error") t.Error("LSP missing unresolved variable error")
} }
// Check INOUT consumed but not produced
if !strings.Contains(output, "consumed by GAM '+FnA'") {
t.Error("LSP missing consumed but not produced error")
}
if t.Failed() { if t.Failed() {
t.Log(output) t.Log(output)
} }

View File

@@ -1,167 +0,0 @@
package integration
import (
"bufio"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
)
// TestLSPBinaryDiagnostics is an end-to-end test of the compiled `mdt lsp`
// binary: it builds the tool, talks raw JSON-RPC (LSP base-protocol
// Content-Length framing) over stdin/stdout, opens examples/app_test.marte
// and waits for the expected published diagnostics.
func TestLSPBinaryDiagnostics(t *testing.T) {
	// 1. Build mdt
	// Ensure we are in test directory context
	buildCmd := exec.Command("go", "build", "-o", "../build/mdt", "../cmd/mdt")
	if output, err := buildCmd.CombinedOutput(); err != nil {
		t.Fatalf("Failed to build mdt: %v\nOutput: %s", err, output)
	}
	// 2. Start mdt lsp
	cmd := exec.Command("../build/mdt", "lsp")
	stdin, _ := cmd.StdinPipe()
	stdout, _ := cmd.StdoutPipe()
	stderr, _ := cmd.StderrPipe()
	// Pipe stderr to test log for debugging
	go func() {
		scanner := bufio.NewScanner(stderr)
		for scanner.Scan() {
			t.Logf("LSP STDERR: %s", scanner.Text())
		}
	}()
	if err := cmd.Start(); err != nil {
		t.Fatalf("Failed to start mdt lsp: %v", err)
	}
	defer func() {
		cmd.Process.Kill()
		cmd.Wait()
	}()
	reader := bufio.NewReader(stdout)
	// send frames a JSON message with the LSP Content-Length header and
	// writes it to the server's stdin.
	send := func(m interface{}) {
		body, _ := json.Marshal(m)
		msg := fmt.Sprintf("Content-Length: %d\r\n\r\n%s", len(body), body)
		stdin.Write([]byte(msg))
	}
	readCh := make(chan map[string]interface{}, 100)
	// Reader goroutine: decodes Content-Length framed JSON-RPC messages
	// from the server and forwards them on readCh; closes the channel on
	// any stream error.
	go func() {
		for {
			// Parse Header
			line, err := reader.ReadString('\n')
			if err != nil {
				close(readCh)
				return
			}
			var length int
			// Handle Content-Length: <len>\r\n
			if _, err := fmt.Sscanf(strings.TrimSpace(line), "Content-Length: %d", &length); err != nil {
				// Maybe empty line or other header?
				continue
			}
			// Read until empty line (\r\n)
			for {
				l, err := reader.ReadString('\n')
				if err != nil {
					close(readCh)
					return
				}
				if l == "\r\n" {
					break
				}
			}
			body := make([]byte, length)
			if _, err := io.ReadFull(reader, body); err != nil {
				close(readCh)
				return
			}
			var m map[string]interface{}
			if err := json.Unmarshal(body, &m); err == nil {
				readCh <- m
			}
		}
	}()
	// The test binary runs from test/, so the project root is its parent.
	cwd, _ := os.Getwd()
	projectRoot := filepath.Dir(cwd)
	absPath := filepath.Join(projectRoot, "examples/app_test.marte")
	uri := "file://" + absPath
	// 3. Initialize
	examplesDir := filepath.Join(projectRoot, "examples")
	send(map[string]interface{}{
		"jsonrpc": "2.0",
		"id":      1,
		"method":  "initialize",
		"params": map[string]interface{}{
			"rootUri": "file://" + examplesDir,
		},
	})
	// 4. Open app_test.marte
	content, err := os.ReadFile(absPath)
	if err != nil {
		t.Fatalf("Failed to read test file: %v", err)
	}
	send(map[string]interface{}{
		"jsonrpc": "2.0",
		"method":  "textDocument/didOpen",
		"params": map[string]interface{}{
			"textDocument": map[string]interface{}{
				"uri":        uri,
				"languageId": "marte",
				"version":    1,
				"text":       string(content),
			},
		},
	})
	// 5. Wait for diagnostics: both the INOUT ordering error and the
	// unresolved-variable error must be published before the timeout.
	foundOrdering := false
	foundVariable := false
	timeout := time.After(30 * time.Second)
	for {
		select {
		case msg, ok := <-readCh:
			if !ok {
				t.Fatal("LSP stream closed unexpectedly")
			}
			t.Logf("Received: %v", msg)
			if method, ok := msg["method"].(string); ok && method == "textDocument/publishDiagnostics" {
				params := msg["params"].(map[string]interface{})
				// Check URI match?
				// if params["uri"] != uri { continue } // Might be absolute vs relative
				diags := params["diagnostics"].([]interface{})
				for _, d := range diags {
					m := d.(map[string]interface{})["message"].(string)
					if strings.Contains(m, "INOUT Signal 'A'") {
						foundOrdering = true
						t.Log("Found Ordering error")
					}
					if strings.Contains(m, "Unresolved variable reference: '@Value'") {
						foundVariable = true
						t.Log("Found Variable error")
					}
				}
				if foundOrdering && foundVariable {
					return // Success
				}
			}
		case <-timeout:
			t.Fatal("Timeout waiting for diagnostics")
		}
	}
}

View File

@@ -163,7 +163,7 @@ $App = {
} }
}) })
t.Run("Scope-aware suggestions", func(t *testing.T) { t.Run("Scope-aware suggestions", func(t *testing.T) {
setup() setup()
// Define a project DataSource in one file // Define a project DataSource in one file
cfg1, _ := parser.NewParser("#package MYPROJ.Data\n+ProjectDS = { Class = FileReader +Signals = { S1 = { Type = int32 } } }").Parse() cfg1, _ := parser.NewParser("#package MYPROJ.Data\n+ProjectDS = { Class = FileReader +Signals = { S1 = { Type = int32 } } }").Parse()
@@ -194,7 +194,7 @@ $App = {
} }
} }
if foundProjectDS { if foundProjectDS {
t.Error("Did not expect ProjectDS in isolated file suggestions") t.Error("Did not expect ProjectDS in isolated file suggestions (isolation)")
} }
// Completion in a project file // Completion in a project file
@@ -317,4 +317,66 @@ package schema
} }
} }
}) })
t.Run("Suggest Variables", func(t *testing.T) {
setup()
content := `
#var MyVar: uint = 10
+App = {
Field =
}
`
lsp.Documents[uri] = content
p := parser.NewParser(content)
cfg, _ := p.Parse()
lsp.Tree.AddFile(path, cfg)
// 1. Triggered by =
params := lsp.CompletionParams{
TextDocument: lsp.TextDocumentIdentifier{URI: uri},
Position: lsp.Position{Line: 3, Character: 12}, // After "Field = "
}
list := lsp.HandleCompletion(params)
if list == nil {
t.Fatal("Expected suggestions")
}
found := false
for _, item := range list.Items {
if item.Label == "@MyVar" {
found = true
break
}
}
if !found {
t.Error("Expected @MyVar in suggestions for =")
}
// 2. Triggered by @
// "Field = @"
lsp.Documents[uri] = `
#var MyVar: uint = 10
+App = {
Field = @
}
`
params2 := lsp.CompletionParams{
TextDocument: lsp.TextDocumentIdentifier{URI: uri},
Position: lsp.Position{Line: 3, Character: 13}, // After "Field = $"
}
list2 := lsp.HandleCompletion(params2)
if list2 == nil {
t.Fatal("Expected suggestions for @")
}
found = false
for _, item := range list2.Items {
if item.Label == "MyVar" { // suggestVariables returns "MyVar"
found = true
break
}
}
if !found {
t.Error("Expected MyVar in suggestions for @")
}
})
} }

View File

@@ -8,7 +8,9 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp" "github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/parser"
) )
func TestLSPIncrementalSync(t *testing.T) { func TestLSPIncrementalSync(t *testing.T) {
@@ -108,3 +110,82 @@ func TestLSPMalformedParams(t *testing.T) {
t.Errorf("Expected nil result for malformed params, got: %s", output) t.Errorf("Expected nil result for malformed params, got: %s", output)
} }
} }
// TestLSPDispatch pushes one JSON-RPC message per supported LSP method
// through HandleMessage, exercising the full dispatch path end to end.
func TestLSPDispatch(t *testing.T) {
	var sink bytes.Buffer
	lsp.Output = &sink

	dispatch := func(msg *lsp.JsonRpcMessage) {
		lsp.HandleMessage(msg)
	}

	// Lifecycle: initialize, then document open/change notifications.
	dispatch(&lsp.JsonRpcMessage{Method: "initialize", ID: 1, Params: json.RawMessage(`{}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/didOpen", Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte","text":""}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/didChange", Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte","version":2},"contentChanges":[{"text":"A"}]}`)})
	// Request/response handlers at position 0:0 of the opened document.
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/hover", ID: 2, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"position":{"line":0,"character":0}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/definition", ID: 3, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"position":{"line":0,"character":0}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/references", ID: 4, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"position":{"line":0,"character":0},"context":{"includeDeclaration":true}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/completion", ID: 5, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"position":{"line":0,"character":0}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/formatting", ID: 6, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"options":{"tabSize":4,"insertSpaces":true}}`)})
	dispatch(&lsp.JsonRpcMessage{Method: "textDocument/rename", ID: 7, Params: json.RawMessage(`{"textDocument":{"uri":"file://d.marte"},"position":{"line":0,"character":0},"newName":"B"}`)})
}
// TestLSPVariableDefinition verifies that go-to-definition on a @variable
// reference resolves to the line of the #var declaration.
func TestLSPVariableDefinition(t *testing.T) {
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	src := `
#var MyVar: int = 10
+Obj = {
Field = @MyVar
}
`
	uri := "file://var_def.marte"
	lsp.Documents[uri] = src
	cfg, _ := parser.NewParser(src).Parse()
	lsp.Tree.AddFile("var_def.marte", cfg)
	lsp.Tree.ResolveReferences()

	// Cursor placed inside the "@MyVar" reference on line 3.
	res := lsp.HandleDefinition(lsp.DefinitionParams{
		TextDocument: lsp.TextDocumentIdentifier{URI: uri},
		Position:     lsp.Position{Line: 3, Character: 13},
	})
	if res == nil {
		t.Fatal("Definition not found for variable")
	}
	locs, ok := res.([]lsp.Location)
	if !ok || len(locs) == 0 {
		t.Fatal("Expected location list")
	}
	// The #var declaration sits on line 1 (0-based; line 0 is blank).
	if locs[0].Range.Start.Line != 1 {
		t.Errorf("Expected line 1, got %d", locs[0].Range.Start.Line)
	}
}

View File

@@ -92,13 +92,7 @@ func TestLSPDiagnosticsAppTest(t *testing.T) {
t.Error("Missing diagnostic for unresolved variable '@Value'") t.Error("Missing diagnostic for unresolved variable '@Value'")
} }
// 2. Check INOUT Ordering Error (Signal A consumed but not produced) // 2. Check INOUT Unused Warning (Signal B produced but not consumed)
// Message format: INOUT Signal 'A' (DS '+DDB') is consumed by GAM '+FnA' ... before being produced ...
if !strings.Contains(output, "INOUT Signal 'A'") || !strings.Contains(output, "before being produced") {
t.Error("Missing diagnostic for INOUT ordering error (Signal A)")
}
// 3. Check INOUT Unused Warning (Signal B produced but not consumed)
// Message format: INOUT Signal 'B' ... produced ... but never consumed ... // Message format: INOUT Signal 'B' ... produced ... but never consumed ...
if !strings.Contains(output, "INOUT Signal 'B'") || !strings.Contains(output, "never consumed") { if !strings.Contains(output, "INOUT Signal 'B'") || !strings.Contains(output, "never consumed") {
t.Error("Missing diagnostic for unused INOUT signal (Signal B)") t.Error("Missing diagnostic for unused INOUT signal (Signal B)")

101
test/lsp_fuzz_test.go Normal file
View File

@@ -0,0 +1,101 @@
package integration
import (
"math/rand"
"testing"
"time"
"github.com/marte-community/marte-dev-tools/internal/lsp"
)
// TestIncrementalFuzz applies 1000 random incremental edits (single-char or
// multi-char inserts and arbitrary-range deletes) to an LSP document and
// checks after every edit that the server's text matches a locally
// maintained mirror of the expected content.
func TestIncrementalFuzz(t *testing.T) {
	// Initialize
	lsp.Documents = make(map[string]string)
	uri := "file://fuzz.marte"
	currentText := ""
	lsp.Documents[uri] = currentText

	// Use a local generator with a logged seed instead of the deprecated
	// rand.Seed, so a failing run can be reproduced exactly.
	seed := time.Now().UnixNano()
	t.Logf("fuzz seed: %d", seed)
	rng := rand.New(rand.NewSource(seed))

	// Apply 1000 random edits
	for i := 0; i < 1000; i++ {
		// Randomly choose Insert or Delete
		isInsert := rng.Intn(2) == 0
		change := lsp.TextDocumentContentChangeEvent{}
		// Use simple ascii string
		length := len(currentText)
		if isInsert || length == 0 {
			// Insert a short ASCII string at a random byte offset.
			pos := 0
			if length > 0 {
				pos = rng.Intn(length + 1)
			}
			insertStr := "X"
			if rng.Intn(5) == 0 {
				insertStr = "\n"
			}
			if rng.Intn(10) == 0 {
				insertStr = "longstring"
			}
			// Calculate Line/Char for pos
			line, char := offsetToLineChar(currentText, pos)
			change.Range = &lsp.Range{
				Start: lsp.Position{Line: line, Character: char},
				End:   lsp.Position{Line: line, Character: char},
			}
			change.Text = insertStr
			// Expected
			currentText = currentText[:pos] + insertStr + currentText[pos:]
		} else {
			// Delete a random non-empty range (at least one character).
			start := rng.Intn(length)
			end := start + 1 + rng.Intn(length-start)
			l1, c1 := offsetToLineChar(currentText, start)
			l2, c2 := offsetToLineChar(currentText, end)
			change.Range = &lsp.Range{
				Start: lsp.Position{Line: l1, Character: c1},
				End:   lsp.Position{Line: l2, Character: c2},
			}
			change.Text = ""
			currentText = currentText[:start] + currentText[end:]
		}
		// Apply
		lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
			TextDocument:   lsp.VersionedTextDocumentIdentifier{URI: uri, Version: i},
			ContentChanges: []lsp.TextDocumentContentChangeEvent{change},
		})
		// Verify the server mirror matches our expected text.
		if lsp.Documents[uri] != currentText {
			t.Fatalf("Fuzz iteration %d failed.\nExpected len: %d\nGot len: %d\nChange: %+v", i, len(currentText), len(lsp.Documents[uri]), change)
		}
	}
}
// offsetToLineChar converts a byte offset into 0-based (line, character)
// coordinates within text. A newline starts a new line; the character count
// is in runes. It returns (-1, -1) when the offset is out of range (or does
// not fall on a rune boundary); an offset equal to len(text) maps to the
// position just past the final rune.
func offsetToLineChar(text string, offset int) (int, int) {
	var line, char int
	for idx, r := range text {
		switch {
		case idx == offset:
			return line, char
		case r == '\n':
			line++
			char = 0
		default:
			char++
		}
	}
	if offset == len(text) {
		return line, char
	}
	return -1, -1
}

View File

@@ -0,0 +1,204 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/schema"
)
// TestIncrementalCorrectness applies three hand-picked incremental edits
// (mid-line insert, cross-line delete that merges two lines, and an insert
// at end-of-text) and verifies the resulting document after each step.
func TestIncrementalCorrectness(t *testing.T) {
	lsp.Documents = make(map[string]string)
	uri := "file://test.txt"
	lsp.Documents[uri] = "12345\n67890"

	// apply sends one ranged content change for the document.
	apply := func(sl, sc, el, ec int, text string) {
		lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
			TextDocument: lsp.VersionedTextDocumentIdentifier{URI: uri},
			ContentChanges: []lsp.TextDocumentContentChangeEvent{{
				Range: &lsp.Range{
					Start: lsp.Position{Line: sl, Character: sc},
					End:   lsp.Position{Line: el, Character: ec},
				},
				Text: text,
			}},
		})
	}

	// Edit 1: insert "A" at 0:1 -> "1A2345\n67890".
	apply(0, 1, 0, 1, "A")
	if lsp.Documents[uri] != "1A2345\n67890" {
		t.Errorf("Edit 1 failed: %q", lsp.Documents[uri])
	}

	// Edit 2: delete the newline. "1A2345" is 6 chars, so 0:6 sits on '\n'
	// and 1:0 sits on '6'; the range 0:6-1:0 covers exactly the '\n',
	// merging the two lines into "1A234567890".
	apply(0, 6, 1, 0, "")
	if lsp.Documents[uri] != "1A234567890" {
		t.Errorf("Edit 2 failed: %q", lsp.Documents[uri])
	}

	// Edit 3: append a newline at 0:11, the end of the (11-char) line.
	apply(0, 11, 0, 11, "\n")
	if lsp.Documents[uri] != "1A234567890\n" {
		t.Errorf("Edit 3 failed: %q", lsp.Documents[uri])
	}
}
// TestIncrementalAppValidation checks that LSP diagnostics remain correct
// while a full RealTimeApplication document is edited incrementally:
//  1. On didOpen, signal A (consumed but never produced) must raise an
//     ERROR and signal B (produced but never consumed) a WARNING.
//  2. Inserting a comment line at the top shifts all positions; both
//     diagnostics must still be reported after the incremental change.
//  3. Adding "Value = 10" to signal A makes it count as produced, so the
//     ERROR for A must disappear while B's WARNING remains.
func TestIncrementalAppValidation(t *testing.T) {
	// Setup: fresh project tree/documents; everything the server would
	// publish is captured in buf for inspection.
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	lsp.GlobalSchema = schema.LoadFullSchema(".")
	var buf bytes.Buffer
	lsp.Output = &buf
	content := `// Test app
+App = {
Class = RealTimeApplication
+Data = {
Class = ReferenceContainer
DefaultDataSource = DDB
+DDB = {
Class = GAMDataSource
}
+TimingDataSource = {
Class = TimingDataSource
}
}
+Functions = {
Class = ReferenceContainer
+A = {
Class = IOGAM
InputSignals = {
A = {
DataSource = DDB
Type = uint32
// Placeholder
}
}
OutputSignals = {
B = {
DataSource = DDB
Type = uint32
}
}
}
}
+States = {
Class = ReferenceContainer
+State = {
Class =RealTimeState
Threads = {
+Th1 = {
Class = RealTimeThread
Functions = {A}
}
}
}
}
+Scheduler = {
Class = GAMScheduler
TimingDataSource = TimingDataSource
}
}
`
	uri := "file://app_inc.marte"
	// 1. Open
	lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
		TextDocument: lsp.TextDocumentItem{URI: uri, Text: content},
	})
	out := buf.String()
	// Signal A is never produced. Should have consumed error.
	if !strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Missing consumed error for A")
	}
	// Signal B is Output, never consumed. Reported as a WARNING.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}
	buf.Reset()
	// 2. Insert comment at start (a pure position shift).
	// Expecting same errors
	change1 := lsp.TextDocumentContentChangeEvent{
		Range: &lsp.Range{Start: lsp.Position{Line: 0, Character: 0}, End: lsp.Position{Line: 0, Character: 0}},
		Text:  "// Comment\n",
	}
	lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
		TextDocument:   lsp.VersionedTextDocumentIdentifier{URI: uri},
		ContentChanges: []lsp.TextDocumentContentChangeEvent{change1},
	})
	out = buf.String()
	// Signal A is never produced. Should have consumed error.
	if !strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Missing consumed error for A")
	}
	// Signal B is Output, never consumed.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}
	buf.Reset()
	// 3. Add Value to A: locate the "// Placeholder" comment inside A's
	// signal block and insert "Value = 10\n" at the start of the line
	// that follows it.
	currentText := lsp.Documents[uri]
	idx := strings.Index(currentText, "Placeholder")
	if idx == -1 {
		t.Fatal("Could not find anchor string")
	}
	// Advance idx to the newline ending the Placeholder line; insertPos
	// is then the offset of the first character of the next line.
	idx = strings.Index(currentText[idx:], "\n") + idx
	insertPos := idx + 1
	// offsetToLineChar is a test-file helper converting a byte offset
	// into an LSP line/character pair.
	line, char := offsetToLineChar(currentText, insertPos)
	change2 := lsp.TextDocumentContentChangeEvent{
		Range: &lsp.Range{Start: lsp.Position{Line: line, Character: char}, End: lsp.Position{Line: line, Character: char}},
		Text:  "Value = 10\n",
	}
	lsp.HandleDidChange(lsp.DidChangeTextDocumentParams{
		TextDocument:   lsp.VersionedTextDocumentIdentifier{URI: uri},
		ContentChanges: []lsp.TextDocumentContentChangeEvent{change2},
	})
	out = buf.String()
	// Signal A has now a Value field and so it is produced. Should NOT have consumed error.
	if strings.Contains(out, "ERROR: INOUT Signal 'A'") {
		t.Error("Unexpected consumed error for A")
	}
	// Signal B is Output, never consumed.
	if !strings.Contains(out, "WARNING: INOUT Signal 'B'") {
		t.Error("Missing produced error for B")
	}
}

108
test/lsp_inlay_hint_test.go Normal file
View File

@@ -0,0 +1,108 @@
package integration
import (
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestLSPInlayHint indexes a small configuration and verifies that
// HandleInlayHint emits the expected hint labels:
//   - signal type/dimension hints ("::uint32[10x1]"),
//   - class hints for referenced DataSources and objects ("FileReader::"),
//   - evaluated expression hints (" => 30"),
//   - variable-reference value hints ("(=> 15)"),
//   - #let expression value hints (" => 15").
func TestLSPInlayHint(t *testing.T) {
	// Setup
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	content := `
#let N : int= 10 + 5
+DS = {
Class = FileReader
Signals = {
Sig1 = { Type = uint32 NumberOfElements = 10 }
}
}
+GAM = {
Class = IOGAM
Expr = 10 + 20
InputSignals = {
Sig1 = { DataSource = DS }
}
}
+Other = {
Class = Controller
Ref = DS
VarRef = @N + 1
}
`
	uri := "file://inlay.marte"
	lsp.Documents[uri] = content
	// A parse failure would silently invalidate every assertion below,
	// so fail fast instead of discarding the error.
	p := parser.NewParser(content)
	cfg, err := p.Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	lsp.Tree.AddFile("inlay.marte", cfg)
	lsp.Tree.ResolveReferences()
	v := validator.NewValidator(lsp.Tree, ".")
	v.ValidateProject()
	// Request hints over the whole document.
	params := lsp.InlayHintParams{
		TextDocument: lsp.TextDocumentIdentifier{URI: uri},
		Range: lsp.Range{
			Start: lsp.Position{Line: 0, Character: 0},
			End:   lsp.Position{Line: 20, Character: 0},
		},
	}
	res := lsp.HandleInlayHint(params)
	if len(res) == 0 {
		t.Fatal("Expected inlay hints, got 0")
	}
	foundTypeHint := false
	foundDSClassHint := false
	foundGeneralRefHint := false
	foundExprHint := false
	foundVarRefHint := false
	foundLetHint := false
	for _, hint := range res {
		t.Logf("Hint: '%s' at Line %d, Col %d", hint.Label, hint.Position.Line, hint.Position.Character)
		if hint.Label == "::uint32[10x1]" {
			foundTypeHint = true
		}
		if hint.Label == "FileReader::" && hint.Position.Line == 12 { // Sig1 line (DS)
			foundDSClassHint = true
		}
		if hint.Label == "FileReader::" && hint.Position.Line == 17 { // Ref = DS line
			foundGeneralRefHint = true
		}
		if hint.Label == " => 30" {
			foundExprHint = true
		}
		if hint.Label == "(=> 15)" {
			foundVarRefHint = true
		}
		if hint.Label == " => 15" && hint.Position.Line == 1 { // #let N line
			foundLetHint = true
		}
	}
	if !foundTypeHint {
		t.Error("Did not find signal type/size hint")
	}
	if !foundDSClassHint {
		t.Error("Did not find DataSource class hint")
	}
	if !foundGeneralRefHint {
		t.Error("Did not find general object reference hint")
	}
	if !foundExprHint {
		t.Error("Did not find expression evaluation hint")
	}
	if !foundVarRefHint {
		t.Error("Did not find variable reference evaluation hint")
	}
	if !foundLetHint {
		t.Error("Did not find #let expression evaluation hint")
	}
}

View File

@@ -0,0 +1,88 @@
package integration
import (
"os"
"path/filepath"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
)
// TestLSPRecursiveIndexing verifies that ScanDirectory indexes .marte
// files in nested subdirectories and that go-to-definition resolves a
// reference made in the root file to an object defined in a subdirectory.
func TestLSPRecursiveIndexing(t *testing.T) {
	// Setup directory structure. t.TempDir creates the directory and
	// registers cleanup automatically (replaces MkdirTemp + RemoveAll).
	rootDir := t.TempDir()
	// root/main.marte
	mainContent := `
#package App
+Main = {
Ref = SubComp
}
`
	if err := os.WriteFile(filepath.Join(rootDir, "main.marte"), []byte(mainContent), 0644); err != nil {
		t.Fatal(err)
	}
	// root/subdir/sub.marte
	subDir := filepath.Join(rootDir, "subdir")
	if err := os.Mkdir(subDir, 0755); err != nil {
		t.Fatal(err)
	}
	subContent := `
#package App
+SubComp = { Class = Component }
`
	if err := os.WriteFile(filepath.Join(subDir, "sub.marte"), []byte(subContent), 0644); err != nil {
		t.Fatal(err)
	}
	// Initialize LSP
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	// Simulate ScanDirectory
	if err := lsp.Tree.ScanDirectory(rootDir); err != nil {
		t.Fatalf("ScanDirectory failed: %v", err)
	}
	lsp.Tree.ResolveReferences()
	// Check if SubComp is in the tree: Root -> App -> SubComp.
	appNode := lsp.Tree.Root.Children["App"]
	if appNode == nil {
		t.Fatal("App package not found")
	}
	subComp := appNode.Children["SubComp"]
	if subComp == nil {
		t.Fatal("SubComp not found in tree (recursive scan failed)")
	}
	mainURI := "file://" + filepath.Join(rootDir, "main.marte")
	// Definition request: position 3:12 sits inside the "SubComp" token
	// on the "Ref = SubComp" line of main.marte.
	params := lsp.DefinitionParams{
		TextDocument: lsp.TextDocumentIdentifier{URI: mainURI},
		Position:     lsp.Position{Line: 3, Character: 12},
	}
	res := lsp.HandleDefinition(params)
	if res == nil {
		t.Fatal("Definition not found for SubComp")
	}
	locs, ok := res.([]lsp.Location)
	if !ok || len(locs) == 0 {
		t.Fatal("Expected location list")
	}
	expectedFile := filepath.Join(subDir, "sub.marte")
	if locs[0].URI != "file://"+expectedFile {
		t.Errorf("Expected definition in %s, got %s", expectedFile, locs[0].URI)
	}
}

View File

@@ -47,7 +47,7 @@ func TestLSPSignalReferences(t *testing.T) {
 	// Find definition of MySig in MyDS
 	root := idx.IsolatedFiles["signal_refs.marte"]
 	if root == nil {
-		t.Fatal("Root node not found")
+		t.Fatal("Root node not found (isolated)")
 	}
 	// Traverse to MySig
View File

@@ -0,0 +1,44 @@
package integration
import (
"bytes"
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/lsp"
"github.com/marte-community/marte-dev-tools/internal/schema"
)
// TestLSPValueValidation verifies that opening a document whose signal
// Value (1024) cannot fit in its declared type (uint8) makes the LSP
// publish a value-validation diagnostic.
func TestLSPValueValidation(t *testing.T) {
	lsp.Tree = index.NewProjectTree()
	lsp.Documents = make(map[string]string)
	lsp.GlobalSchema = schema.LoadFullSchema(".")

	// Capture everything the server would publish.
	var sink bytes.Buffer
	lsp.Output = &sink

	const uri = "file://value.marte"
	content := `
+Data = {
Class = ReferenceContainer
+DS = { Class = GAMDataSource Signals = { S = { Type = uint8 } } }
}
+GAM = {
Class = IOGAM
InputSignals = {
S = { DataSource = DS Type = uint8 Value = 1024 }
}
}
+App = { Class = RealTimeApplication +States = { Class = ReferenceContainer +S = { Class = RealTimeState Threads = { +T = { Class = RealTimeThread Functions = { GAM } } } } } }
`
	lsp.HandleDidOpen(lsp.DidOpenTextDocumentParams{
		TextDocument: lsp.TextDocumentItem{URI: uri, Text: content},
	})

	got := sink.String()
	if !strings.Contains(got, "Value initialization mismatch") {
		t.Error("LSP did not report value validation error")
		t.Log(got)
	}
}

View File

@@ -15,11 +15,23 @@ func TestOperators(t *testing.T) {
 #var B: int = 20
 #var S1: string = "Hello"
 #var S2: string = "World"
+#var FA: float = 1.5
+#var FB: float = 2.0
 +Obj = {
 	Math = @A + @B
 	Precedence = @A + @B * 2
 	Concat = @S1 .. " " .. @S2
+	FloatMath = @FA + @FB
+	Mix = @A + @FA
+	ConcatNum = "Num: " .. @A
+	ConcatFloat = "F: " .. @FA
+	ConcatArr = "A: " .. { 1 }
+	BoolVal = true
+	RefVal = Obj
+	ArrVal = { 1 2 }
+	Unres = @Unknown
+	InvalidMath = "A" + 1
 }
 `
 // Check Parser
@@ -55,4 +67,26 @@ func TestOperators(t *testing.T) {
 	if !strings.Contains(outStr, "Concat = \"Hello World\"") {
 		t.Errorf("Concat failed. Got:\n%s", outStr)
 	}
+	if !strings.Contains(outStr, "FloatMath = 3.5") {
+		t.Errorf("FloatMath failed. Got:\n%s", outStr)
+	}
+	// 10 + 1.5 = 11.5
+	if !strings.Contains(outStr, "Mix = 11.5") {
+		t.Errorf("Mix failed. Got:\n%s", outStr)
+	}
+	if !strings.Contains(outStr, "ConcatNum = \"Num: 10\"") {
+		t.Errorf("ConcatNum failed. Got:\n%s", outStr)
+	}
+	if !strings.Contains(outStr, "BoolVal = true") {
+		t.Errorf("BoolVal failed. Got:\n%s", outStr)
+	}
+	if !strings.Contains(outStr, "RefVal = Obj") {
+		t.Errorf("RefVal failed. Got:\n%s", outStr)
+	}
+	if !strings.Contains(outStr, "ArrVal = { 1 2 }") {
+		t.Errorf("ArrVal failed. Got:\n%s", outStr)
+	}
+	if !strings.Contains(outStr, "Unres = @Unknown") {
+		t.Errorf("Unres failed. Got:\n%s", outStr)
+	}
 }

View File

@@ -0,0 +1,54 @@
package integration
import (
"os"
"path/filepath"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
)
// TestRecursiveIndexing checks that index.ProjectTree.ScanDirectory finds
// .marte files both at the scan root and two levels deep, registering
// each file's package and objects in the tree.
func TestRecursiveIndexing(t *testing.T) {
	// Setup: root/level1/level2/deep.marte. t.TempDir handles creation
	// and cleanup (the original discarded MkdirTemp's error).
	rootDir := t.TempDir()
	l1 := filepath.Join(rootDir, "level1")
	l2 := filepath.Join(l1, "level2")
	if err := os.MkdirAll(l2, 0755); err != nil {
		t.Fatal(err)
	}
	content := "#package Deep\n+DeepObj = { Class = A }"
	if err := os.WriteFile(filepath.Join(l2, "deep.marte"), []byte(content), 0644); err != nil {
		t.Fatal(err)
	}
	// Also add a file in root to ensure mixed levels work.
	if err := os.WriteFile(filepath.Join(rootDir, "root.marte"), []byte("#package Root\n+RootObj = { Class = A }"), 0644); err != nil {
		t.Fatal(err)
	}
	// Scan
	tree := index.NewProjectTree()
	err := tree.ScanDirectory(rootDir)
	if err != nil {
		t.Fatalf("Scan failed: %v", err)
	}
	// Verify the deeply nested package made it into the tree.
	deepPkg := tree.Root.Children["Deep"]
	if deepPkg == nil {
		t.Fatal("Package Deep not found")
	}
	if deepPkg.Children["DeepObj"] == nil {
		t.Fatal("DeepObj not found in Deep package")
	}
	// Verify the root-level package as well.
	rootPkg := tree.Root.Children["Root"]
	if rootPkg == nil {
		t.Fatal("Package Root not found")
	}
	if rootPkg.Children["RootObj"] == nil {
		t.Fatal("RootObj not found in Root package")
	}
}

View File

@@ -0,0 +1,84 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/schema"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestValidatorExpressionCoverage feeds the validator a configuration that
// exercises the expression evaluator's arithmetic, string, unary, bitwise
// and boolean paths, plus deliberately invalid constructs, and asserts
// that the constraint-violating BadSum produces a "value mismatch"
// diagnostic.
func TestValidatorExpressionCoverage(t *testing.T) {
	content := `
#var A: int = 10
#var B: int = 5
#var S1: string = "Hello"
#var S2: string = "World"
// Valid cases (execution hits evaluateBinary)
#var Sum: int = @A + @B // 15
#var Sub: int = @A - @B // 5
#var Mul: int = @A * @B // 50
#var Div: int = @A / @B // 2
#var Mod: int = @A % 3 // 1
#var Concat: string = @S1 .. " " .. @S2 // "Hello World"
#var Unary: int = -@A // -10
#var BitAnd: int = 10 & 5
#var BitOr: int = 10 | 5
#var BitXor: int = 10 ^ 5
#var FA: float = 1.5
#var FB: float = 2.0
#var FSum: float = @FA + @FB // 3.5
#var FSub: float = @FB - @FA // 0.5
#var FMul: float = @FA * @FB // 3.0
#var FDiv: float = @FB / @FA // 1.333...
#var BT: bool = true
#var BF: bool = !@BT
// Invalid cases (should error)
#var BadSum: int & > 20 = @A + @B // 15, should fail
#var BadUnary: bool = !10 // Should fail type check (nil result from evaluateUnary)
#var StrVar: string = "DS"
+InvalidDS = {
Class = IOGAM
InputSignals = {
S = { DataSource = 10 } // Int coverage
S2 = { DataSource = 1.5 } // Float coverage
S3 = { DataSource = true } // Bool coverage
S4 = { DataSource = @StrVar } // VarRef coverage -> String
S5 = { DataSource = { 1 } } // Array coverage (default case)
}
OutputSignals = {}
}
`
	tree := index.NewProjectTree()
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatalf("Parse failed: %v", err)
	}
	tree.AddFile("expr.marte", cfg)
	tree.ResolveReferences()

	v := validator.NewValidator(tree, ".")
	// Use NewSchema to ensure basic types
	v.Schema = schema.NewSchema()
	v.CheckVariables()

	// BadSum evaluates to 15 but carries a "> 20" constraint, so a
	// diagnostic naming BadSum with "value mismatch" must be present.
	var foundBadSum bool
	for _, diag := range v.Diagnostics {
		if strings.Contains(diag.Message, "BadSum") && strings.Contains(diag.Message, "value mismatch") {
			foundBadSum = true
			break
		}
	}
	if !foundBadSum {
		t.Error("Expected error for BadSum")
	}
}

View File

@@ -0,0 +1,101 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestINOUTValueInitialization verifies that an input signal carrying a
// Value field counts as initialized: GAM2 may consume Sig1 in the same
// thread without any "consumed before being produced" diagnostic.
func TestINOUTValueInitialization(t *testing.T) {
	content := `
+Data = {
Class = ReferenceContainer
+MyDS = {
Class = GAMDataSource
#meta = { multithreaded = false }
Signals = { Sig1 = { Type = uint32 } }
}
}
+GAM1 = {
Class = IOGAM
InputSignals = {
Sig1 = {
DataSource = MyDS
Type = uint32
Value = 10 // Initialization
}
}
}
+GAM2 = {
Class = IOGAM
InputSignals = {
Sig1 = { DataSource = MyDS Type = uint32 } // Consumes initialized signal
}
}
+App = {
Class = RealTimeApplication
+States = {
Class = ReferenceContainer
+State1 = {
Class = RealTimeState
+Thread1 = {
Class = RealTimeThread
Functions = { GAM1, GAM2 } // Should Pass
}
}
}
}
`
	tree := index.NewProjectTree()
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatal(err)
	}
	tree.AddFile("main.marte", cfg)

	v := validator.NewValidator(tree, ".")
	v.ValidateProject()

	// No production-order diagnostic may be emitted for this setup.
	for _, diag := range v.Diagnostics {
		if strings.Contains(diag.Message, "before being produced") {
			t.Errorf("Unexpected error: %s", diag.Message)
		}
	}
}
// TestINOUTValueTypeMismatch verifies that initializing a uint8 signal
// with the out-of-range value 1024 yields a "Value initialization
// mismatch" diagnostic from project validation.
func TestINOUTValueTypeMismatch(t *testing.T) {
	content := `
+Data = { Class = ReferenceContainer +DS = { Class = GAMDataSource #meta = { multithreaded = false } Signals = { S = { Type = uint8 } } } }
+GAM1 = {
Class = IOGAM
InputSignals = {
S = { DataSource = DS Type = uint8 Value = 1024 }
}
}
+App = { Class = RealTimeApplication +States = { Class = ReferenceContainer +S = { Class = RealTimeState Threads = { +T = { Class = RealTimeThread Functions = { GAM1 } } } } } }
`
	tree := index.NewProjectTree()
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatal(err)
	}
	tree.AddFile("fail.marte", cfg)

	v := validator.NewValidator(tree, ".")
	v.ValidateProject()

	mismatchReported := false
	for _, diag := range v.Diagnostics {
		if strings.Contains(diag.Message, "Value initialization mismatch") {
			mismatchReported = true
			break
		}
	}
	if !mismatchReported {
		t.Error("Expected Value initialization mismatch error")
	}
}

View File

@@ -195,6 +195,6 @@ func TestIsolatedFileValidation(t *testing.T) {
 	}
 	if ref.Target != nil {
-		t.Errorf("Expected reference in isolated file to be unresolved, but got target in %s", ref.Target.Fragments[0].File)
+		t.Errorf("Isolation failure: reference in isolated file resolved to global object")
 	}
 }
View File

@@ -0,0 +1,46 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestUnusedGAMValueValidation verifies that value validation also covers
// GAMs not referenced by any RealTimeState thread: the out-of-range
// Value = 1024 on a uint8 signal must still be reported.
func TestUnusedGAMValueValidation(t *testing.T) {
	content := `
+Data = {
Class = ReferenceContainer
+DS = { Class = GAMDataSource Signals = { S = { Type = uint8 } } }
}
+UnusedGAM = {
Class = IOGAM
InputSignals = {
S = { DataSource = DS Type = uint8 Value = 1024 }
}
}
+App = { Class = RealTimeApplication }
`
	tree := index.NewProjectTree()
	cfg, err := parser.NewParser(content).Parse()
	if err != nil {
		t.Fatal(err)
	}
	tree.AddFile("unused.marte", cfg)

	v := validator.NewValidator(tree, ".")
	v.ValidateProject()

	mismatchReported := false
	for _, diag := range v.Diagnostics {
		if strings.Contains(diag.Message, "Value initialization mismatch") {
			mismatchReported = true
			break
		}
	}
	if !mismatchReported {
		t.Error("Expected Value initialization mismatch error for unused GAM")
	}
}

View File

@@ -0,0 +1,87 @@
package integration
import (
"strings"
"testing"
"github.com/marte-community/marte-dev-tools/internal/index"
"github.com/marte-community/marte-dev-tools/internal/parser"
"github.com/marte-community/marte-dev-tools/internal/validator"
)
// TestMultiFileVariableResolution checks that a #var declared in one file
// of a package is visible to references (@GlobalVal) made from another
// file of the same package, both through validator diagnostics and
// through the reference index itself.
func TestMultiFileVariableResolution(t *testing.T) {
	// First file declares the variable; second file references it.
	declFile := `#package Test
#var GlobalVal: int = 42`
	useFile := `
#package Test
+App = {
Class = RealTimeApplication
Field = @GlobalVal
}
`
	tree := index.NewProjectTree()

	cfgVars, err := parser.NewParser(declFile).Parse()
	if err != nil {
		t.Fatalf("Parse file1 error: %v", err)
	}
	tree.AddFile("vars.marte", cfgVars)

	cfgMain, err := parser.NewParser(useFile).Parse()
	if err != nil {
		t.Fatalf("Parse file2 error: %v", err)
	}
	tree.AddFile("main.marte", cfgMain)

	tree.ResolveReferences()

	// Log the root-scope variable names for debugging failures.
	t.Logf("Root Variables keys: %v", getKeys(tree.Root.Variables))

	// Existence check via the validator: no unresolved-variable errors.
	v := validator.NewValidator(tree, ".")
	v.CheckUnresolvedVariables()
	for _, d := range v.Diagnostics {
		if strings.Contains(d.Message, "Unresolved variable") {
			t.Errorf("Unexpected unresolved variable error: %s", d.Message)
		}
	}

	// Direct check: the indexed reference must point at the declaration.
	seen := false
	for _, ref := range tree.References {
		if ref.Name != "GlobalVal" {
			continue
		}
		seen = true
		switch {
		case ref.TargetVariable == nil:
			t.Error("Reference 'GlobalVal' TargetVariable is nil (not resolved)")
		case ref.TargetVariable.Name != "GlobalVal":
			t.Errorf("Reference resolved to wrong variable: %s", ref.TargetVariable.Name)
		}
	}
	if !seen {
		t.Error("Reference 'GlobalVal' not found in index")
	}
}
// getKeys returns the keys of a variable map in map-iteration (i.e.
// unspecified) order; used only for diagnostic logging in tests.
func getKeys(m map[string]index.VariableInfo) []string {
	// Pre-size to the map length to avoid repeated append growth.
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	return keys
}