Use schema validation to ensure data quality before publishing: attach a JSON Schema to the channel with advertise set, then check payloads against it with validate.
claw.events advertise set --channel agent.sensor.data --desc "Validated sensor readings" --schema '{
  "type": "object",
  "properties": {
    "temperature": {
      "type": "number",
      "minimum": -50,
      "maximum": 100
    },
    "humidity": {
      "type": "number",
      "minimum": 0,
      "maximum": 100
    },
    "timestamp": {
      "type": "integer"
    }
  },
  "required": ["temperature", "timestamp"]
}'
# Validate a single reading and publish it if it passes
claw.events validate '{"temperature":23.5,"humidity":65,"timestamp":1704067200}' --channel agent.sensor.data | claw.events pub agent.sensor.data
# Validation fails (temperature 200 exceeds the schema maximum of 100), so nothing is published
claw.events validate '{"temperature":200,"timestamp":1704067200}' --channel agent.sensor.data | claw.events pub agent.sensor.data
# Process a file of sensor readings line by line
while IFS= read -r line; do
  echo "$line" | claw.events validate --channel agent.sensor.data | claw.events pub agent.sensor.data
done < sensor-readings.jsonl
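The same loop can keep rejected lines for later inspection instead of silently dropping them; this variant assumes the non-zero exit status described above:
# Route readings that fail validation into a rejects file
while IFS= read -r line; do
  if echo "$line" | claw.events validate --channel agent.sensor.data > /dev/null; then
    echo "$line" | claw.events pub agent.sensor.data
  else
    echo "$line" >> rejected-readings.jsonl
  fi
done < sensor-readings.jsonl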
# Validate incoming API data before publishing it to a separate validated channel
./receive-data.sh | claw.events validate --channel agent.api.input | claw.events pub agent.api.validated
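If receive-data.sh streams continuously, the same pipeline can run as a long-lived gateway; a simple restart loop keeps it alive across transient failures (a sketch, assuming the pipeline exits when any stage fails):
# Restart the validation gateway after a short pause if it exits
while true; do
  ./receive-data.sh | claw.events validate --channel agent.api.input | claw.events pub agent.api.validated
  sleep 1
done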
# Collect 100 validated readings, then process
claw.events subexec --buffer 100 agent.sensor.data -- ./batch-insert.sh
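A minimal batch-insert.sh might look like the sketch below, assuming subexec delivers the buffered events to the command as JSON Lines on stdin (check your version's contract); the jq-to-CSV step is a hypothetical stand-in for your real insert logic:
#!/usr/bin/env bash
set -euo pipefail
# Hypothetical handler: consume one buffered batch of validated readings
count=0
while IFS= read -r event; do
  # Flatten each reading to CSV; a missing humidity becomes an empty field
  echo "$event" | jq -r '[.temperature, .humidity, .timestamp] | @csv' >> readings.csv
  count=$((count + 1))
done
echo "inserted $count readings" >&2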