Added schema validation and schema db

This commit is contained in:
Martino Ferrari
2026-01-21 18:13:22 +01:00
parent f3c13fca55
commit 5a2b51ec34
10 changed files with 803 additions and 50 deletions

156
internal/schema/marte.json Normal file
View File

@@ -0,0 +1,156 @@
{
"classes": {
"RealTimeApplication": {
"fields": [
{"name": "Functions", "type": "node", "mandatory": true},
{"name": "Data", "type": "node", "mandatory": true},
{"name": "States", "type": "node", "mandatory": true}
]
},
"StateMachine": {
"fields": [
{"name": "States", "type": "node", "mandatory": true}
]
},
"GAMScheduler": {
"fields": [
{"name": "TimingDataSource", "type": "reference", "mandatory": true}
]
},
"TimingDataSource": {
"fields": []
},
"IOGAM": {
"fields": [
{"name": "InputSignals", "type": "node", "mandatory": false},
{"name": "OutputSignals", "type": "node", "mandatory": false}
]
},
"ReferenceContainer": {
"fields": []
},
"ConstantGAM": {
"fields": []
},
"PIDGAM": {
"fields": [
{"name": "Kp", "type": "float", "mandatory": true},
{"name": "Ki", "type": "float", "mandatory": true},
{"name": "Kd", "type": "float", "mandatory": true}
]
},
"FileDataSource": {
"fields": [
{"name": "Filename", "type": "string", "mandatory": true},
{"name": "Format", "type": "string", "mandatory": false}
]
},
"LoggerDataSource": {
"fields": []
},
"DANStream": {
"fields": [
{"name": "Timeout", "type": "int", "mandatory": false}
]
},
"EPICSCAInput": {
"fields": []
},
"EPICSCAOutput": {
"fields": []
},
"EPICSPVAInput": {
"fields": []
},
"EPICSPVAOutput": {
"fields": []
},
"SDNSubscriber": {
"fields": [
{"name": "Address", "type": "string", "mandatory": true},
{"name": "Port", "type": "int", "mandatory": true},
{"name": "Interface", "type": "string", "mandatory": false}
]
},
"SDNPublisher": {
"fields": [
{"name": "Address", "type": "string", "mandatory": true},
{"name": "Port", "type": "int", "mandatory": true},
{"name": "Interface", "type": "string", "mandatory": false}
]
},
"UDPReceiver": {
"fields": [
{"name": "Port", "type": "int", "mandatory": true},
{"name": "Address", "type": "string", "mandatory": false}
]
},
"UDPSender": {
"fields": [
{"name": "Destination", "type": "string", "mandatory": true}
]
},
"FileReader": {
"fields": [
{"name": "Filename", "type": "string", "mandatory": true},
{"name": "Format", "type": "string", "mandatory": false},
{"name": "Interpolate", "type": "string", "mandatory": false}
]
},
"FileWriter": {
"fields": [
{"name": "Filename", "type": "string", "mandatory": true},
{"name": "Format", "type": "string", "mandatory": false},
{"name": "StoreOnTrigger", "type": "int", "mandatory": false}
]
},
"OrderedClass": {
"ordered": true,
"fields": [
{"name": "First", "type": "int", "mandatory": true},
{"name": "Second", "type": "string", "mandatory": true}
]
},
"BaseLib2GAM": { "fields": [] },
"ConversionGAM": { "fields": [] },
"DoubleHandshakeGAM": { "fields": [] },
"FilterGAM": { "fields": [] },
"HistogramGAM": { "fields": [] },
"Interleaved2FlatGAM": { "fields": [] },
"FlattenedStructIOGAM": { "fields": [] },
"MathExpressionGAM": { "fields": [] },
"MessageGAM": { "fields": [] },
"MuxGAM": { "fields": [] },
"SimulinkWrapperGAM": { "fields": [] },
"SSMGAM": { "fields": [] },
"StatisticsGAM": { "fields": [] },
"TimeCorrectionGAM": { "fields": [] },
"TriggeredIOGAM": { "fields": [] },
"WaveformGAM": { "fields": [] },
"DAN": { "fields": [] },
"LinuxTimer": { "fields": [] },
"LinkDataSource": { "fields": [] },
"MDSReader": { "fields": [] },
"MDSWriter": { "fields": [] },
"NI1588TimeStamp": { "fields": [] },
"NI6259ADC": { "fields": [] },
"NI6259DAC": { "fields": [] },
"NI6259DIO": { "fields": [] },
"NI6368ADC": { "fields": [] },
"NI6368DAC": { "fields": [] },
"NI6368DIO": { "fields": [] },
"NI9157CircularFifoReader": { "fields": [] },
"NI9157MxiDataSource": { "fields": [] },
"OPCUADSInput": { "fields": [] },
"OPCUADSOutput": { "fields": [] },
"RealTimeThreadAsyncBridge": { "fields": [] },
"RealTimeThreadSynchronisation": { "fields": [] },
"UARTDataSource": { "fields": [] },
"BaseLib2Wrapper": { "fields": [] },
"EPICSCAClient": { "fields": [] },
"EPICSPVA": { "fields": [] },
"MemoryGate": { "fields": [] },
"OPCUA": { "fields": [] },
"SysLogger": { "fields": [] }
}
}

55
internal/schema/schema.go Normal file
View File

@@ -0,0 +1,55 @@
// Package schema loads and represents MARTe class schemas used to
// validate project configuration trees.
package schema
import (
	_ "embed"
	"encoding/json"
	"fmt"
	"os"
)

// defaultSchemaJSON holds the built-in MARTe schema embedded at build time.
//
//go:embed marte.json
var defaultSchemaJSON []byte

// Schema maps class names to their field definitions.
type Schema struct {
	Classes map[string]ClassDefinition `json:"classes"`
}

// ClassDefinition describes the fields accepted by a MARTe class.
// When Ordered is true, fields are expected to appear in the order
// they are declared in the schema.
type ClassDefinition struct {
	Fields []FieldDefinition `json:"fields"`
	Ordered bool `json:"ordered"`
}

// FieldDefinition describes a single field of a class: its name, its
// expected value type, and whether it must be present.
type FieldDefinition struct {
	Name string `json:"name"`
	Type string `json:"type"` // "int", "float", "string", "bool", "reference", "array", "node", "any"
	Mandatory bool `json:"mandatory"`
}
// NewSchema constructs an empty Schema ready to have classes registered.
func NewSchema() *Schema {
	s := &Schema{}
	s.Classes = map[string]ClassDefinition{}
	return s
}
// LoadSchema reads and parses a schema definition from the JSON file at path.
// The read error is returned unwrapped (os.ReadFile errors already carry the
// path and remain inspectable via errors.Is / os.IsNotExist); a malformed
// document yields a parse error wrapped with %w so the cause stays reachable.
func LoadSchema(path string) (*Schema, error) {
	content, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var s Schema
	if err := json.Unmarshal(content, &s); err != nil {
		// %w (not %v) preserves the underlying *json.SyntaxError /
		// *json.UnmarshalTypeError for callers using errors.As.
		return nil, fmt.Errorf("failed to parse schema: %w", err)
	}
	return &s, nil
}
// DefaultSchema returns a built-in schema with core MARTe classes, parsed
// from the JSON embedded at build time. It panics only when the embedded
// document is malformed, which is a build-time programming error rather
// than a runtime condition.
func DefaultSchema() *Schema {
	s := &Schema{}
	if err := json.Unmarshal(defaultSchemaJSON, s); err != nil {
		panic(fmt.Sprintf("failed to parse default embedded schema: %v", err))
	}
	return s
}

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"github.com/marte-dev/marte-dev-tools/internal/index"
"github.com/marte-dev/marte-dev-tools/internal/parser"
"github.com/marte-dev/marte-dev-tools/internal/schema"
)
type DiagnosticLevel int
@@ -23,69 +24,79 @@ type Diagnostic struct {
type Validator struct {
Diagnostics []Diagnostic
Tree *index.ProjectTree
Schema *schema.Schema
}
func NewValidator(tree *index.ProjectTree) *Validator {
return &Validator{Tree: tree}
return &Validator{
Tree: tree,
Schema: schema.DefaultSchema(),
}
}
func (v *Validator) ValidateProject() {
if v.Tree == nil || v.Tree.Root == nil {
if v.Tree == nil {
return
}
v.validateNode(v.Tree.Root)
if v.Tree.Root != nil {
v.validateNode(v.Tree.Root)
}
for _, node := range v.Tree.IsolatedFiles {
v.validateNode(node)
}
}
func (v *Validator) validateNode(node *index.ProjectNode) {
// Check for duplicate fields in this node
fields := make(map[string]string) // FieldName -> File
// Collect fields and their definitions
fields := make(map[string][]*parser.Field)
fieldOrder := []string{} // Keep track of order of appearance (approximate across fragments)
for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
if existingFile, exists := fields[f.Name]; exists {
// Duplicate field
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Duplicate Field Definition: '%s' is already defined in %s", f.Name, existingFile),
Position: f.Position,
File: frag.File,
})
} else {
fields[f.Name] = frag.File
if _, exists := fields[f.Name]; !exists {
fieldOrder = append(fieldOrder, f.Name)
}
fields[f.Name] = append(fields[f.Name], f)
}
}
}
// Check for mandatory Class if it's an object node (+/$)
// 1. Check for duplicate fields
for name, defs := range fields {
if len(defs) > 1 {
// Report error on the second definition
firstFile := v.getFileForField(defs[0], node)
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Duplicate Field Definition: '%s' is already defined in %s", name, firstFile),
Position: defs[1].Position,
File: v.getFileForField(defs[1], node),
})
}
}
// 2. Check for mandatory Class if it's an object node (+/$)
className := ""
if node.RealName != "" && (node.RealName[0] == '+' || node.RealName[0] == '$') {
hasClass := false
hasType := false
for _, frag := range node.Fragments {
for _, def := range frag.Definitions {
if f, ok := def.(*parser.Field); ok {
if f.Name == "Class" {
hasClass = true
}
if f.Name == "Type" {
hasType = true
}
}
}
if hasClass {
break
if classFields, ok := fields["Class"]; ok && len(classFields) > 0 {
// Extract class name from value
switch val := classFields[0].Value.(type) {
case *parser.StringValue:
className = val.Value
case *parser.ReferenceValue:
className = val.Value
}
}
if !hasClass && !hasType {
// Report error on the first fragment's position
pos := parser.Position{Line: 1, Column: 1}
file := ""
if len(node.Fragments) > 0 {
pos = node.Fragments[0].ObjectPos
file = node.Fragments[0].File
}
hasType := false
if _, ok := fields["Type"]; ok {
hasType = true
}
if className == "" && !hasType {
pos := v.getNodePosition(node)
file := v.getNodeFile(node)
v.Diagnostics = append(v.Diagnostics, Diagnostic{
Level: LevelError,
Message: fmt.Sprintf("Node %s is an object and must contain a 'Class' field (or be a Signal with 'Type')", node.RealName),
@@ -95,12 +106,140 @@ func (v *Validator) validateNode(node *index.ProjectNode) {
}
}
// 3. Schema Validation
if className != "" && v.Schema != nil {
if classDef, ok := v.Schema.Classes[className]; ok {
v.validateClass(node, classDef, fields, fieldOrder)
}
}
// Recursively validate children
for _, child := range node.Children {
v.validateNode(child)
}
}
// validateClass checks a node's fields against a schema class definition:
// mandatory fields must be present, present fields must match their declared
// types, and — for ordered classes — fields must appear in schema order.
// Duplicate fields are reported separately by validateNode; only the first
// definition of each field is type-checked here.
func (v *Validator) validateClass(node *index.ProjectNode, classDef schema.ClassDefinition, fields map[string][]*parser.Field, fieldOrder []string) {
	// 1. Mandatory fields. A field declared with type "node" may be
	// satisfied either by a plain field or by a child node of that name.
	for _, fieldDef := range classDef.Fields {
		if !fieldDef.Mandatory {
			continue
		}
		_, found := fields[fieldDef.Name]
		if !found && fieldDef.Type == "node" {
			_, found = node.Children[fieldDef.Name]
		}
		if !found {
			v.Diagnostics = append(v.Diagnostics, Diagnostic{
				Level: LevelError,
				// NOTE(review): assumes Metadata["Class"] is populated by the
				// indexer with the node's class name — confirm; otherwise the
				// message prints an empty class.
				Message: fmt.Sprintf("Missing mandatory field '%s' for class '%s'", fieldDef.Name, node.Metadata["Class"]),
				Position: v.getNodePosition(node),
				File: v.getNodeFile(node),
			})
		}
	}
	// 2. Field types.
	for _, fieldDef := range classDef.Fields {
		fList, ok := fields[fieldDef.Name]
		if !ok {
			continue
		}
		f := fList[0] // duplicates are diagnosed elsewhere
		if !v.checkType(f.Value, fieldDef.Type) {
			v.Diagnostics = append(v.Diagnostics, Diagnostic{
				Level: LevelError,
				Message: fmt.Sprintf("Field '%s' expects type '%s'", fieldDef.Name, fieldDef.Type),
				Position: f.Position,
				File: v.getFileForField(f, node),
			})
		}
	}
	// 3. Field order. Only fields known to the schema participate; extra
	// fields are ignored (the schema is not closed). A field is out of
	// order when its schema index precedes the highest index seen so far.
	if !classDef.Ordered {
		return
	}
	schemaIdx := 0
	for _, nodeFieldName := range fieldOrder {
		for i, fd := range classDef.Fields {
			if fd.Name != nodeFieldName {
				continue
			}
			if i < schemaIdx {
				v.Diagnostics = append(v.Diagnostics, Diagnostic{
					Level: LevelError,
					Message: fmt.Sprintf("Field '%s' is out of order", nodeFieldName),
					Position: fields[nodeFieldName][0].Position,
					File: v.getFileForField(fields[nodeFieldName][0], node),
				})
			} else {
				schemaIdx = i
			}
			break
		}
	}
}
// checkType reports whether val matches the schema type expectedType.
// "any", "node", and unknown type names are accepted unconditionally:
// a "node" field normally corresponds to a child object rather than a
// parsed field value, so no field-level check is possible here.
func (v *Validator) checkType(val parser.Value, expectedType string) bool {
	ok := true
	switch expectedType {
	case "int":
		_, ok = val.(*parser.IntValue)
	case "float":
		_, ok = val.(*parser.FloatValue)
	case "string":
		_, ok = val.(*parser.StringValue)
	case "bool":
		_, ok = val.(*parser.BoolValue)
	case "array":
		_, ok = val.(*parser.ArrayValue)
	case "reference":
		_, ok = val.(*parser.ReferenceValue)
	}
	return ok
}
// getFileForField returns the file of the fragment that defines f within
// node, or the empty string when f belongs to none of node's fragments.
func (v *Validator) getFileForField(f *parser.Field, node *index.ProjectNode) string {
	for _, fragment := range node.Fragments {
		for _, definition := range fragment.Definitions {
			if definition != f {
				continue
			}
			return fragment.File
		}
	}
	return ""
}
func (v *Validator) CheckUnused() {
referencedNodes := make(map[*index.ProjectNode]bool)
for _, ref := range v.Tree.References {
@@ -109,7 +248,12 @@ func (v *Validator) CheckUnused() {
}
}
v.checkUnusedRecursive(v.Tree.Root, referencedNodes)
if v.Tree.Root != nil {
v.checkUnusedRecursive(v.Tree.Root, referencedNodes)
}
for _, node := range v.Tree.IsolatedFiles {
v.checkUnusedRecursive(node, referencedNodes)
}
}
func (v *Validator) checkUnusedRecursive(node *index.ProjectNode, referenced map[*index.ProjectNode]bool) {
@@ -172,4 +316,4 @@ func (v *Validator) getNodeFile(node *index.ProjectNode) string {
return node.Fragments[0].File
}
return ""
}
}