feat: implement history push mutation and enhanced MCP logging

- Implement pushHistory mutation to support pushing telemetry data to Zabbix trapper items.

- Add VERBOSITY and MCP_LOG_* environment variables for controllable request/response logging in both the API and the MCP server.

- Enhance ZabbixRESTDataSource with better session handling and error logging.

- Update ZabbixHistory datasource to support history push operations.

- Expand documentation with new cookbook recipes and MCP integration guides.

- Add integration tests for history pushing (src/test/history_push*).

- Reorganize documentation, moving technical product info PDF to docs/use-cases/.

- Update GraphQL generated types and VCR templates.
Andreas Hilbig 2026-02-03 13:29:42 +01:00
parent b646b8c606
commit 7c2dee2b6c
28 changed files with 6036 additions and 3088 deletions

.idea/workspace.xml generated
View file

@ -5,34 +5,30 @@
</component>
<component name="ChangeListManager">
<list default="true" id="d7a71994-2699-4ae4-9fd2-ee13b7f33d35" name="Changes" comment="docs: refactor documentation and upgrade to Node.js 24&#10;&#10;This commit upgrades the project to Node.js 24 (LTS) and performs a major refactoring of the documentation to support both advanced users and AI-based automation (MCP).&#10;&#10;Changes:&#10;- Environment &amp; CI/CD:&#10; - Set Node.js version to &gt;=24 in package.json and .nvmrc.&#10; - Updated Dockerfile to use Node 24 base image.&#10; - Updated @types/node to ^24.10.9.&#10;- Documentation:&#10; - Refactored README.md with comprehensive technical reference, configuration details, and Zabbix-to-GraphQL mapping.&#10; - Created docs/howtos/cookbook.md with practical recipes for common tasks and AI test generation.&#10; - Updated docs/howtos/mcp.md to emphasize GraphQL's advantages for AI agents and Model Context Protocol.&#10; - Added readme.improvement.plan.md to track documentation evolution.&#10; - Enhanced all how-to guides with improved cross-references and up-to-date information.&#10;- Guidelines:&#10; - Updated .junie/guidelines.md with Node 24 requirements and enhanced commit message standards (Conventional Commits 1.0.0).&#10;- Infrastructure &amp; Code:&#10; - Updated docker-compose.yml with Apollo MCP server integration.&#10; - Refined configuration and schema handling in src/api/ and src/datasources/.&#10; - Synchronized generated TypeScript types with schema updates.">
<change afterPath="$PROJECT_DIR$/docs/queries/sample_distance_tracker_test_query.graphql" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/test/schema_dependent_queries.test.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/runConfigurations/index_ts.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/runConfigurations/index_ts.xml" afterDir="false" />
<change afterPath="$PROJECT_DIR$/docs/use-cases/VCR - Technical product information.pdf" afterDir="false" />
<change afterPath="$PROJECT_DIR$/src/testdata/templates/zbx_device_tracker_vcr.yaml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docker-compose.yml" beforeDir="false" afterPath="$PROJECT_DIR$/docker-compose.yml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/VCR - Technical product information.pdf" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/docs/howtos/cookbook.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/cookbook.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/howtos/query_optimization.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/query_optimization.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/howtos/schema.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/schema.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/howtos/mcp.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/mcp.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/queries/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/queries/README.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/tests.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/tests.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/mcp/operations/runAllRegressionTests.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/mcp/operations/runAllRegressionTests.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/extensions/display_devices.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/display_devices.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/extensions/ground_value_checker.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/ground_value_checker.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/extensions/location_tracker_commons.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/location_tracker_commons.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/extensions/location_tracker_devices.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/location_tracker_devices.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/extensions/weather_sensor.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/weather_sensor.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/docs/use-cases/trade-fair-logistics-requirements.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/use-cases/trade-fair-logistics-requirements.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/mcp-config.yaml" beforeDir="false" afterPath="$PROJECT_DIR$/mcp-config.yaml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/samples/extensions/location_tracker_devices.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/samples/extensions/location_tracker_devices.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/schema/mutations.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/schema/mutations.graphql" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/api/resolvers.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/api/resolvers.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/graphql-params-to-zabbix-output.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/graphql-params-to-zabbix-output.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/zabbix-hosts.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-hosts.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/zabbix-templates.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-templates.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/execution/host_importer.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/execution/host_importer.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/api/start.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/api/start.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/common_utils.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/common_utils.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/zabbix-history.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-history.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/zabbix-items.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-items.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/datasources/zabbix-request.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-request.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/execution/host_exporter.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/execution/host_exporter.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/execution/regression_test_executor.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/execution/regression_test_executor.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/execution/template_importer.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/execution/template_importer.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/schema/generated/graphql.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/schema/generated/graphql.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/test/host_query.test.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/test/host_query.test.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/test/query_optimization.test.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/test/query_optimization.test.ts" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/testdata/templates/zbx_default_templates_vcr.yaml" beforeDir="false" afterPath="$PROJECT_DIR$/src/testdata/templates/zbx_default_templates_vcr.yaml" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
@ -43,7 +39,7 @@
<execution />
</component>
<component name="EmbeddingIndexingInfo">
<option name="cachedIndexableFilesCount" value="163" />
<option name="cachedIndexableFilesCount" value="169" />
<option name="fileBasedEmbeddingIndicesEnabled" value="true" />
</component>
<component name="Git.Settings">
@ -111,7 +107,7 @@
"go.import.settings.migrated": "true",
"javascript.preferred.runtime.type.id": "node",
"junie.onboarding.icon.badge.shown": "true",
"last_opened_file_path": "//wsl.localhost/Ubuntu/home/ahilbig/git/vcr/zabbix-graphql-api/docs",
"last_opened_file_path": "//wsl.localhost/Ubuntu/home/ahilbig/git/vcr/zabbix-graphql-api/docs/use-cases",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
@ -139,11 +135,11 @@
</component>
<component name="RecentsManager">
<key name="CopyFile.RECENT_KEYS">
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\docs\use-cases" />
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\docs" />
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\src" />
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\dist" />
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\src\testdata\templates" />
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\src\test" />
</key>
<key name="MoveFile.RECENT_KEYS">
<recent name="\\wsl.localhost\Ubuntu\home\ahilbig\git\vcr\zabbix-graphql-api\schema" />
@ -195,9 +191,9 @@
<item itemvalue="Node.js.index.ts" />
<item itemvalue="npm.compile" />
<item itemvalue="npm.codegen" />
<item itemvalue="npm.test" />
<item itemvalue="npm.copy-schema" />
<item itemvalue="npm.prod" />
<item itemvalue="npm.test" />
</list>
<recent_temporary>
<list>
@ -235,6 +231,7 @@
<workItem from="1769789496322" duration="14281000" />
<workItem from="1769849767328" duration="18404000" />
<workItem from="1769955114366" duration="3276000" />
<workItem from="1770107035156" duration="3830000" />
</task>
<task id="LOCAL-00001" summary="chore: Update IntelliJ workspace settings and add GitHub Actions workflow for Docker deployment">
<option name="closed" value="true" />
@ -504,7 +501,7 @@
</line-breakpoint>
<line-breakpoint enabled="true" type="javascript">
<url>file://$PROJECT_DIR$/src/datasources/zabbix-request.ts</url>
<line>253</line>
<line>254</line>
<option name="timeStamp" value="6" />
</line-breakpoint>
</breakpoints>

View file

@ -99,7 +99,11 @@ The API is configured via environment variables. Create a `.env` file or set the
| `SCHEMA_PATH` | Path to schema files | `./schema/` | No |
| `ADDITIONAL_SCHEMAS` | Comma-separated list of additional schema files | - | No |
| `ADDITIONAL_RESOLVERS` | Comma-separated list of resolver types to generate | - | No |
| `LOG_LEVEL` | Log level configuration (e.g. `debug`, `info`, `warn`, `error`) | `info` | No |
| `LOG_LEVELS` | Comma-separated list of log levels to enable (e.g. `DEBUG,INFO,ERROR`) | - | No |
| `VERBOSITY` | Verbosity level for GraphQL logging (0=off, 1=parameters, 2=parameters+responses) | `0` | No |
| `MCP_LOG_LEVEL` | Log level for the MCP server | `info` | No |
| `MCP_LOG_PARAMETERS` | Whether to log parameters of MCP calls | `false` | No |
| `MCP_LOG_RESPONSES` | Whether to log responses of MCP calls | `false` | No |
| `HOST_TYPE_FILTER_DEFAULT` | Default filter for host types | - | No |
| `HOST_GROUP_FILTER_DEFAULT` | Default filter for host groups | - | No |
@ -194,7 +198,7 @@ The **Virtual Control Room (VCR)** is a professional cockpit and control center
- **Data Visualization**: It uses the `exportHostValueHistory` endpoint to power dashboards showing historical trends, such as traffic density, battery levels, or sensor readings over time.
For more detailed information about the VCR product, please refer to the technical presentation:
[VCR - Technical product information](docs/VCR%20-%20Technical%20product%20information.pdf)
[VCR - Technical product information](docs/use-cases/VCR%20-%20Technical%20product%20information.pdf)
## Sample Environment File

View file

@ -11,6 +11,8 @@ services:
environment:
- SCHEMA_PATH=/usr/app/dist/schema/
- ZABBIX_DEVELOPMENT_TOKEN=${ZABBIX_DEVELOPMENT_TOKEN}
- VERBOSITY=${VERBOSITY:-0}
- LOG_LEVELS=${LOG_LEVELS:-INFO}
volumes:
- ./samples:/usr/app/dist/samples
@ -25,6 +27,9 @@ services:
command: /mcp-config.yaml
environment:
- APOLLO_GRAPH_REF=local@main
- MCP_LOG_LEVEL=${MCP_LOG_LEVEL:-info}
- MCP_LOG_PARAMETERS=${MCP_LOG_PARAMETERS:-false}
- MCP_LOG_RESPONSES=${MCP_LOG_RESPONSES:-false}
depends_on:
schema-gen:
condition: service_completed_successfully

View file

@ -346,6 +346,91 @@ Create a host, assign it macros for coordinates, and query its state.
---
## 🍳 Recipe: Extending Schema with a Simulated Device (Zabbix Trap)
This recipe demonstrates how to create a simulated device type that receives data via Zabbix Trapper items. This is useful for testing or for devices that push their state (like Path/GeoJSON data) to Zabbix instead of being polled.
### 📋 Prerequisites
- Zabbix GraphQL API is running.
- You have an external script or system capable of pushing data to Zabbix (e.g. using `zabbix_sender` or the `pushHistory` mutation).
### 🛠️ Step 1: Define the Schema Extension
Add the `TrackedDevice` type to `samples/extensions/location_tracker_devices.graphql`:
```graphql
type TrackedDevice implements Host & Device {
hostid: ID!
host: String!
deviceType: String
hostgroups: [HostGroup!]
name: String
tags: DeviceConfig
inventory: Inventory
items: [ZabbixItem!]
state: TrackedState
}
type TrackedState implements DeviceState {
operational: OperationalDeviceData
current: TrackedValues
}
type TrackedValues {
"""
GeoJSON representation of the tracked device's location or path.
"""
geojson: JSONObject
}
```
### ⚙️ Step 2: Register the Resolver
Add `TrackedDevice` to your `.env` file:
```env
ADDITIONAL_RESOLVERS=...,TrackedDevice
```
Restart the API server.
### 🚀 Step 3: Import the Simulated Device Template
Use the `importTemplates` mutation to create a template with a **Zabbix Trapper** item. We use the `json_` prefix in the item key to ensure the JSON string is automatically parsed into a `JSONObject`.
> **Reference**: Use the [Sample: Import Simulated BT Template](../../docs/queries/sample_import_simulated_bt_template.graphql) for a complete mutation and variables example.
### 🚀 Step 4: Push History Data
Push GeoJSON data to your simulated device using the `pushHistory` mutation. This allows you to simulate a device moving along a path by providing multiple data points with different timestamps.
> **Reference**: See the [Sample: Push GeoJSON History](../../docs/queries/sample_push_geojson_history.graphql) for a complete example of pushing historical data.
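If you prefer to generate the data points programmatically, the following sketch builds the `values` array from a list of GeoJSON features (using each feature's `properties.time` as its timestamp, as in the sample) and sends the `pushHistory` mutation over HTTP. The endpoint URL and the token handling are assumptions for a local setup, not part of the sample.
```typescript
// Hedged sketch: push a sequence of GeoJSON features as history values.
interface GeoJsonFeature {
  type: "Feature";
  properties: { time: string; [k: string]: unknown };
  geometry: { type: "Point"; coordinates: [number, number] };
}

async function pushPath(features: GeoJsonFeature[], host: string, key: string) {
  const values = features.map(f => ({
    timestamp: f.properties.time, // one data point per feature, keyed by its own timestamp
    value: f,                     // sent as a JSONObject; the API stringifies it for Zabbix
  }));
  const response = await fetch("http://localhost:4000/", { // assumed local GraphQL endpoint
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Alternatively authenticate with the zbx_session cookie.
      "zabbix-auth-token": process.env.ZABBIX_DEVELOPMENT_TOKEN ?? "",
    },
    body: JSON.stringify({
      query: `mutation PushGeoJsonHistory($host: String!, $key: String!, $values: [HistoryPushInput!]!) {
        pushHistory(host: $host, key: $key, values: $values) { message data { itemid error { message } } }
      }`,
      variables: { host, key, values },
    }),
  });
  console.log(await response.json());
}

// Example: simulate Vehicle1 moving along a short path.
// await pushPath(features, "Vehicle1", "state.current.json_geojson");
```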
### ✅ Step 5: Verification
Verify that the device correctly resolves to the new type and that both the current state and historical data are accessible.
- **Create Host**: Use the `importHosts` mutation to create a host (e.g. `Vehicle1`) and link it to the simulated template.
- **Query Current State**: Use the `allDevices` query to verify the latest pushed position.
- *Reference*: See the [Sample: Tracked Device Query](../../docs/queries/sample_tracked_device_query.graphql).
- **Query Historical Data**: Use the `exportHostValueHistory` query to retrieve the path history.
```graphql
query GetVehicleHistory($host: [String!], $key: [String!]) {
exportHostValueHistory(
host_filter: $host,
itemKey_filter: $key,
type: TEXT,
limit: 100
) {
result
}
}
```
**Variables**:
```json
{
"host": ["Vehicle1"],
"key": ["state.current.json_geojson"]
}
```
---
## 🍳 Recipe: Testing Specialized Device Types
This recipe shows how to execute a comprehensive query to verify the state and configuration of specialized device types, such as the `DistanceTrackerDevice`. This is useful for validating that your schema extensions and hierarchical mappings are working correctly.
@ -586,6 +671,77 @@ You can ask **Junie** to automate the entire cloning process:
---
## 🍳 Recipe: Pushing History Data to Trapper Items
This recipe shows how to push data into Zabbix items of type `ZABBIX_TRAP` using the `pushHistory` mutation. This is particularly useful for IoT devices or external systems that push data to Zabbix instead of being polled.
### 📋 Prerequisites
- Zabbix GraphQL API is running.
- A host exists in Zabbix.
- An item of type `ZABBIX_TRAP` (type 2) exists on that host.
### 🛠️ Step 1: Preparation
Identify the `itemid` or the combination of `host` and `key` for the target item.
### 🚀 Step 2: Execution/Action
Execute the `pushHistory` mutation. You can provide multiple values with different timestamps. The `value` field accepts a `JSONObject`, which will be automatically stringified before being sent to Zabbix.
```graphql
mutation PushDeviceData($host: String, $key: String, $itemid: Int, $values: [HistoryPushInput!]!) {
pushHistory(host: $host, key: $key, itemid: $itemid, values: $values) {
message
data {
itemid
error {
message
}
}
}
}
```
**Sample Variables**:
```json
{
"host": "IoT-Sensor-01",
"key": "sensor.data.json",
"values": [
{
"timestamp": "2024-01-01T12:00:00Z",
"value": {
"temperature": 22.5,
"humidity": 45
}
},
{
"timestamp": "2024-01-01T12:01:00Z",
"value": {
"temperature": 22.6,
"humidity": 44
}
}
]
}
```
### ✅ Step 3: Verification
Verify that the data was successfully pushed by querying the item's last value:
```graphql
query VerifyPushedData($host: String!) {
allHosts(filter_host: $host) {
items {
name
key_
lastvalue
lastclock
}
}
}
```
---
## 🍳 Recipe: Setting up GraphQL MCP for AI Agents
This recipe guides you through setting up the Model Context Protocol (MCP) server to enable AI agents like **Junie** or **Claude** to interact with your Zabbix data through the GraphQL API.

View file

@ -102,3 +102,17 @@ The MCP server can be used in conjunction with the [**Cookbook**](./cookbook.md)
Example prompt for an LLM:
> "Using the `zabbix-graphql` MCP server, follow the 'Provisioning a New Host' recipe from the cookbook. Create a host named 'Test-Host-01' in the 'Linux servers' group and link the 'ICMP Ping' template."
### 📝 Logging & Verbosity
You can control the logging level and verbosity of both the GraphQL API and the MCP server via environment variables. This is particularly useful for debugging MCP calls and seeing the exact parameters and responses.
- **GraphQL API Verbosity**:
- `VERBOSITY=1`: Logs GraphQL operation names and parameters (variables).
- `VERBOSITY=2`: Logs operation names, parameters, and the full response body.
- **MCP Server Logging**:
- `MCP_LOG_LEVEL`: Sets the log level for the Apollo MCP server (`debug`, `info`, `warn`, `error`).
- `MCP_LOG_PARAMETERS=true`: Enables logging of parameters in the MCP server.
- `MCP_LOG_RESPONSES=true`: Enables logging of responses in the MCP server.
When running via Docker Compose, these can be set in your `.env` file.

View file

@ -8,12 +8,15 @@ This directory contains practical examples of GraphQL operations for the Zabbix
- [Query All Hosts](./sample_all_hosts_query.graphql): Retrieve basic host information and inventory.
- [Import Hosts](./sample_import_hosts_mutation.graphql): Create or update multiple hosts with tags and group assignments.
- [Query All Devices](./sample_all_devices_query.graphql): Query specialized devices using the `allDevices` query.
- [Tracked Device Query](./sample_tracked_device_query.graphql): Query simulated or tracked devices.
- [Push GeoJSON History](./sample_push_geojson_history.graphql): Push multiple GeoJSON data points to a tracked device.
- [Distance Tracker Test Query](./sample_distance_tracker_test_query.graphql): Comprehensive query for testing specialized `DistanceTrackerDevice` types.
### 📄 Templates
- [Query Templates](./sample_templates_query.graphql): List available templates and their items.
- [Import Templates](./sample_import_templates_mutation.graphql): Create or update complex templates with item definitions and preprocessing.
- [Import Distance Tracker Template](./sample_import_distance_tracker_template.graphql): Example of importing a template for a schema extension.
- [Import Simulated BT Template](./sample_import_simulated_bt_template.graphql): Example of importing a template for a simulated device.
- [Delete Templates](./sample_delete_templates_mutation.graphql): Remove templates by ID or name pattern.
### 📂 Template Groups

View file

@ -0,0 +1,46 @@
### Mutation
Use this mutation to import a template for a simulated device that pushes GeoJSON data via Zabbix Trapper items.
```graphql
mutation ImportSimulatedBTTemplate($templates: [CreateTemplate!]!) {
importTemplates(templates: $templates) {
host
templateid
message
error {
message
}
}
}
```
### Variables
The following sample defines the `SIMULATED_BT_DEVICE` template. Note the `deviceType` tag set to `TrackedDevice`, which instructs the GraphQL API to resolve this host using the specialized `TrackedDevice` type.
We use the `state.current.json_geojson` key for the trapper item. The `json_` prefix ensures that the JSON string received from Zabbix is automatically parsed into a `JSONObject` by the GraphQL resolver.
```json
{
"templates": [
{
"host": "SIMULATED_BT_DEVICE",
"name": "Simulated BT Device",
"groupNames": ["Templates/Roadwork/Devices"],
"tags": [
{ "tag": "class", "value": "roadwork" },
{ "tag": "deviceType", "value": "TrackedDevice" }
],
"items": [
{
"name": "GeoJSON Data",
"type": 2,
"key": "state.current.json_geojson",
"value_type": 4,
"history": "7d",
"description": "Trapper item receiving GeoJSON payloads"
}
]
}
]
}
```

View file

@ -0,0 +1,470 @@
### Mutation
Use this mutation to push multiple GeoJSON data points to a simulated device. Each point is pushed with its own timestamp extracted from the GeoJSON properties.
```graphql
mutation PushGeoJsonHistory($host: String!, $key: String!, $values: [HistoryPushInput!]!) {
pushHistory(host: $host, key: $key, values: $values) {
message
data {
itemid
error {
message
}
}
}
}
```
### Variables
The following variables push 20 GeoJSON features to the `Vehicle1` host.
Note that we use the technical Zabbix key `state.current.json_geojson` (including the `json_` prefix) to target the correct trapper item.
```json
{
"host": "Vehicle1",
"key": "state.current.json_geojson",
"values": [
{
"timestamp": "2026-02-02T16:00:00.000Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:00.000Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.980238638943689,
50.94213786322479
]
}
}
},
{
"timestamp": "2026-02-02T16:00:12.838Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:12.838Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979929916761165,
50.9418828739594
]
}
}
},
{
"timestamp": "2026-02-02T16:00:24.413Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:24.413Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.97947100540884,
50.9418828739594
]
}
}
},
{
"timestamp": "2026-02-02T16:00:38.910Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:38.910Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.978962030999412,
50.94205111445655
]
}
}
},
{
"timestamp": "2026-02-02T16:00:48.218Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:48.218Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.97871171571623,
50.94222198308785
]
}
}
},
{
"timestamp": "2026-02-02T16:00:55.942Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:00:55.942Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.978469744275316,
50.9423402763876
]
}
}
},
{
"timestamp": "2026-02-02T16:01:07.048Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:07.048Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.978269492048469,
50.94209317448525
]
}
}
},
{
"timestamp": "2026-02-02T16:01:18.399Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:18.399Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.97871171571623,
50.94214574946821
]
}
}
},
{
"timestamp": "2026-02-02T16:01:27.785Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:27.785Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.9789870625279775,
50.942303474059884
]
}
}
},
{
"timestamp": "2026-02-02T16:01:38.408Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:38.408Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979103876326803,
50.94255846101797
]
}
}
},
{
"timestamp": "2026-02-02T16:01:50.930Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:50.930Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979337503923659,
50.94283447625244
]
}
}
},
{
"timestamp": "2026-02-02T16:01:58.600Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:01:58.600Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979475177329533,
50.94300534200477
]
}
}
},
{
"timestamp": "2026-02-02T16:02:15.429Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:02:15.429Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979212346282509,
50.942618921637944
]
}
}
},
{
"timestamp": "2026-02-02T16:02:30.260Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:02:30.260Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.97896620292093,
50.94228244414538
]
}
}
},
{
"timestamp": "2026-02-02T16:02:36.242Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:02:36.242Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.978816013750361,
50.942166779444534
]
}
}
},
{
"timestamp": "2026-02-02T16:02:46.091Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:02:46.091Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.978582386152624,
50.94196962304159
]
}
}
},
{
"timestamp": "2026-02-02T16:02:56.752Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:02:56.752Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979003750213366,
50.941948592976075
]
}
}
},
{
"timestamp": "2026-02-02T16:03:15.630Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:03:15.630Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.979721320691965,
50.94181452608393
]
}
}
},
{
"timestamp": "2026-02-02T16:03:24.823Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:03:24.823Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.980071762088585,
50.9418776164344
]
}
}
},
{
"timestamp": "2026-02-02T16:03:36.019Z",
"value": {
"type": "Feature",
"properties": {
"time": "2026-02-02T16:03:36.019Z",
"deviceType": "locationTracker",
"parent": {
"type": "vehicle",
"subType": "forklift",
"name": "vehicle1"
}
},
"geometry": {
"type": "Point",
"coordinates": [
6.980234467022115,
50.94213786322479
]
}
}
}
]
}
```

View file

@ -0,0 +1,19 @@
### Query
Retrieve the state of tracked devices.
```graphql
query GetSimulatedState {
allDevices(tag_deviceType: ["TrackedDevice"]) {
name
host
deviceType
... on TrackedDevice {
state {
current {
geojson
}
}
}
}
}
```

View file

@ -53,6 +53,9 @@ This document outlines the test cases and coverage for the Zabbix GraphQL API.
- **TC-AUTH-04**: Import user rights.
- **TC-AUTH-05**: Import user rights using sample mutation.
### History and Data Pushing
- **TC-HIST-01**: Push history data using `pushHistory` mutation.
### Query Optimization
- **TC-OPT-01**: Verify that GraphQL queries only fetch requested fields from Zabbix (reduced output).
- **TC-OPT-02**: Verify that skippable Zabbix parameters (like selectItems) are omitted if not requested in GraphQL.
@ -91,6 +94,7 @@ The `runAllRegressionTests` mutation (TC-E2E-02) executes the following checks:
- **State sub-properties**: Verifies that requesting device state sub-properties correctly triggers the retrieval of required Zabbix items, even if `items` is not explicitly requested (verifying the indirect dependency logic).
- **Negative Optimization (allDevices)**: Verifies that items are NOT requested from Zabbix if neither `items` nor `state` (or state sub-properties) are requested within the `allDevices` query.
- **allDevices deviceType filter**: Verifies that the `allDevices` query only returns hosts that have a `deviceType` tag, and that the `deviceType` field is populated for all results.
- **pushHistory mutation**: Verifies that the `pushHistory` mutation correctly pushes data to ZABBIX_TRAP items, using either item ID or a combination of host and item key.
## ✅ Test Coverage Checklist

View file

@ -1,6 +1,6 @@
# 🏗️ Trade Fair Logistics Requirements
This document outlines the requirements for extending the Zabbix GraphQL API to support trade fair logistics, derived from the analysis of the "KI-gestützte Orchestrierung in der Messelogistik" (AI-supported orchestration in trade fair logistics) pilot at Koelnmesse.
This document outlines the requirements for extending the Zabbix GraphQL API to support trade fair logistics, derived from the analysis of the planned "AI-supported orchestration in trade fair logistics" pilot at Koelnmesse.
## 📋 Project Context
The goal is to use the **Virtual Control Room (VCR)** as an orchestration platform to improve punctuality, throughput, and exception handling in trade fair logistics.

View file

@ -18,3 +18,7 @@ introspection:
enabled: true
search:
enabled: true
logging:
level: ${MCP_LOG_LEVEL:-info}
parameters: ${MCP_LOG_PARAMETERS:-false}
responses: ${MCP_LOG_RESPONSES:-false}

View file

@ -25,6 +25,34 @@ type DistanceTrackerDevice implements Host & Device {
state: DistanceTrackerState
}
"""
TrackedDevice represents a device used for simulation or tracking purposes,
pushing its state (e.g. geojson) via Zabbix Trap.
"""
type TrackedDevice implements Host & Device {
hostid: ID!
host: String!
deviceType: String
hostgroups: [HostGroup!]
name: String
tags: DeviceConfig
inventory: Inventory
items: [ZabbixItem!]
state: TrackedState
}
type TrackedState implements DeviceState {
operational: OperationalDeviceData
current: TrackedValues
}
type TrackedValues {
"""
GeoJSON representation of the tracked device's location or path.
"""
geojson: JSONObject
}
"""
Represents the state of a distance tracker device.
"""

View file

@ -127,6 +127,22 @@ type Mutation {
name_pattern: String
): [DeleteResponse!]
"""
Pushes history data to Zabbix (ZABBIX_TRAP items).
Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
"""
pushHistory(
"""Item ID to push data to."""
itemid: Int,
"""Item key to push data to (required if itemid is empty)."""
key: String,
"""Host name the item belongs to (required if itemid is empty)."""
host: String,
"""Values to push."""
values: [HistoryPushInput!]!
): HistoryPushResponse
"""
Runs a smoketest: creates a template, links a host, verifies it, and cleans up.
"""
@ -770,3 +786,35 @@ type ImportUserRightResult {
errors: [ApiError!]
}
"""
Input for pushing history data.
"""
input HistoryPushInput {
"""Timestamp of the value."""
timestamp: DateTime!
"""The value to push (JSON object)."""
value: JSONObject!
}
"""
Response object for pushHistory operation.
"""
type HistoryPushResponse {
"""Overall status message."""
message: String
"""Error information if the operation failed."""
error: ApiError
"""Detailed results for each pushed value."""
data: [HistoryPushData!]
}
"""
Detailed result for a single pushed value.
"""
type HistoryPushData {
"""The Zabbix item ID."""
itemid: String
"""Error information if the push failed for this item."""
error: ApiError
}

View file

@ -11,6 +11,7 @@ import {
MutationImportTemplateGroupsArgs,
MutationImportTemplatesArgs,
MutationImportUserRightsArgs,
MutationPushHistoryArgs,
Permission,
QueryAllDevicesArgs,
QueryAllHostGroupsArgs,
@ -34,6 +35,7 @@ import {TemplateDeleter} from "../execution/template_deleter.js";
import {HostValueExporter} from "../execution/host_exporter.js";
import {logger} from "../logging/logger.js";
import {ParsedArgs, ZabbixRequest} from "../datasources/zabbix-request.js";
import {ZabbixHistoryGetParams, ZabbixHistoryPushParams, ZabbixHistoryPushRequest, ZabbixQueryHistoryRequest} from "../datasources/zabbix-history.js";
import {
ZabbixCreateHostRequest,
ZabbixQueryDevices,
@ -257,6 +259,21 @@ export function createResolvers(): Resolvers {
}: any) => {
return TemplateImporter.importTemplates(args.templates, zabbixAuthToken, cookie)
},
pushHistory: async (_parent: any, args: MutationPushHistoryArgs, {
zabbixAuthToken,
cookie
}: any) => {
const result = await new ZabbixHistoryPushRequest(zabbixAuthToken, cookie)
.executeRequestThrowError(zabbixAPI, new ZabbixHistoryPushParams(args.values, args.itemid?.toString(), args.key ?? undefined, args.host ?? undefined));
return {
message: result.response === "success" ? "History pushed successfully" : "Some errors occurred",
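// Zabbix may report per-item errors either as a string array or as a structured error; normalize to ApiError.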
data: result.data.map(d => ({
itemid: d.itemid,
error: Array.isArray(d.error) ? {message: d.error.join(", ")} : d.error
}))
}
},
deleteTemplates: async (_parent: any, args: MutationDeleteTemplatesArgs, {
zabbixAuthToken,
cookie

View file

@ -11,6 +11,7 @@ import {logger} from "../logging/logger.js";
import {zabbixAPI, zabbixDevelopmentToken} from "../datasources/zabbix-api.js";
import {WebSocketServer} from "ws";
import {useServer} from "graphql-ws/lib/use/ws";
import {Config} from "../common_utils.js";
const GRAPHQL_PATH = "/"
const GRAPHQL_PORT = 4000
@ -59,6 +60,25 @@ async function startApolloServer() {
};
},
},
// Request logging plugin
{
async requestDidStart(requestContext) {
if (Config.VERBOSITY > 0) {
logger.info(`GraphQL Request: ${requestContext.request.operationName || 'Unnamed Operation'}`);
if (requestContext.request.variables) {
logger.info(`Parameters: ${JSON.stringify(requestContext.request.variables, null, 2)}`);
}
}
return {
async willSendResponse(requestContext) {
if (Config.VERBOSITY > 1) {
logger.info(`GraphQL Response for ${requestContext.request.operationName || 'Unnamed Operation'}:`);
logger.info(JSON.stringify(requestContext.response.body, null, 2));
}
},
};
},
},
],
});

View file

@ -15,6 +15,7 @@ static readonly DRY_RUN = process.env.DRY_RUN
static readonly ZABBIX_ROADWORK_BASE_GROUP = process.env.ZABBIX_ROADWORK_BASE_GROUP
static readonly ZABBIX_PERMISSION_TEMPLATE_GROUP_NAME_PREFIX = process.env.ZABBIX_PERMISSION_TEMPLATE_GROUP_NAME_PREFIX || "Permissions"
static readonly LOG_LEVELS = process.env.LOG_LEVELS
static readonly VERBOSITY = process.env.VERBOSITY ? parseInt(process.env.VERBOSITY) : 0
static readonly HOST_TYPE_FILTER_DEFAULT = process.env.HOST_TYPE_FILTER_DEFAULT;
static readonly HOST_GROUP_FILTER_DEFAULT = process.env.HOST_GROUP_FILTER_DEFAULT;
}

View file

@ -1,7 +1,10 @@
import {SortOrder, StorageItemType} from "../schema/generated/graphql.js";
import {ParsedArgs, ZabbixParams, ZabbixRequest, ZabbixResult} from "./zabbix-request.js";
import {ApiError, SortOrder, StorageItemType} from "../schema/generated/graphql.js";
import {isZabbixErrorResult, ParsedArgs, ZabbixErrorResult, ZabbixParams, ZabbixRequest, ZabbixResult} from "./zabbix-request.js";
import {ZabbixAPI} from "./zabbix-api.js";
import {GraphQLError} from "graphql";
export interface ZabbixValue {
itemid?: string,
key?: string,
host?: string,
value: string,
@ -18,13 +21,13 @@ export class ZabbixHistoryGetParams extends ParsedArgs {
time_till_ms: number | undefined
constructor(public itemids: number[] | number | string | string[],
public output: string[] = ["value", "itemid", "clock", "ns"],
public limit: number | null = Array.isArray(itemids) ? itemids.length : 1,
public history: StorageItemType | string = StorageItemType.Text,
time_from?: Date,
time_until?: Date,
public sortfield: string[] = ["clock", "ns"],
public sortorder: SortOrder | null = SortOrder.Desc,
public output: string[] = ["value", "itemid", "clock", "ns"],
public limit: number | null = Array.isArray(itemids) ? itemids.length : 1,
public history: StorageItemType | string = StorageItemType.Text,
time_from?: Date,
time_until?: Date,
public sortfield: string[] = ["clock", "ns"],
public sortorder: SortOrder | null = SortOrder.Desc,
) {
super();
this.time_from_ms = time_from ? Math.floor(new Date(time_from).getTime() / 1000) : undefined
@ -50,3 +53,58 @@ export class ZabbixQueryHistoryRequest extends ZabbixRequest<ZabbixExportValue[]
}
}
}
export interface ZabbixHistoryPushInput {
timestamp: string
value: any,
}
export interface ZabbixHistoryPushResult {
response: string,
data: { itemid: string, error?: string[] | ApiError }[],
error?: ApiError | string[]
}
export class ZabbixHistoryPushParams extends ParsedArgs {
constructor(public values: ZabbixHistoryPushInput[], public itemid?: string,
public key?: string,
public host?: string,) {
super();
}
}
export class ZabbixHistoryPushRequest extends ZabbixRequest<ZabbixHistoryPushResult, ZabbixHistoryPushParams> {
constructor(authToken?: string | null, cookie?: string) {
super("history.push", authToken, cookie);
}
async prepare(zabbixAPI: ZabbixAPI, args?: ZabbixHistoryPushParams): Promise<ZabbixHistoryPushResult | ZabbixErrorResult | undefined> {
if (!args) return undefined;
if (!args.itemid && (!args.key || !args.host)) {
throw new GraphQLError("if itemid is empty both key and host must be filled");
}
return super.prepare(zabbixAPI, args);
}
createZabbixParams(args?: ZabbixHistoryPushParams): ZabbixParams {
if (!args) return [];
return args.values.map(v => {
const date = new Date(v.timestamp);
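// Split the timestamp into epoch seconds (clock) and the sub-second remainder in nanoseconds (ns), as expected by history.push.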
const result: any = {
value: typeof v.value === 'string' ? v.value : JSON.stringify(v.value),
clock: Math.floor(date.getTime() / 1000),
ns: (date.getTime() % 1000) * 1000000
};
if (args.itemid) {
result.itemid = args.itemid;
} else {
result.host = args.host;
result.key = args.key;
}
return result as ZabbixValue;
});
}
}

View file

@ -33,4 +33,3 @@ export class ZabbixQueryItemsRequest extends ZabbixRequest<ZabbixItem[]> {
};
}
}

View file

@ -208,7 +208,8 @@ export class ZabbixRequest<T extends ZabbixResult, A extends ParsedArgs = Parsed
return paramsObj;
})
} else {
params = {...this.requestBodyTemplate.params, ...zabbixParams ?? this.createZabbixParams(args, output)}
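// Some methods (e.g. history.push) take an array as params; pass arrays through unchanged instead of merging them into the request template.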
const p = zabbixParams ?? this.createZabbixParams(args, output);
params = Array.isArray(p) ? p : {...this.requestBodyTemplate.params, ...p}
}
return params ? {
...this.requestBodyTemplate,

View file

@ -85,8 +85,7 @@ export class HostValueExporter {
filter: {
host: hostFilter,
key_: itemKeyFilter
},
tags: [{"tag": "hasValue", "operator": 1, "value": "true"}]
}
}))
if (isZabbixErrorResult(items)) {

View file

@ -13,6 +13,7 @@ import {
} from "../datasources/zabbix-hosts.js";
import {ZabbixQueryTemplatesRequest} from "../datasources/zabbix-templates.js";
import {isZabbixErrorResult, ParsedArgs, ZabbixRequest} from "../datasources/zabbix-request.js";
import {ZabbixHistoryPushParams, ZabbixHistoryPushRequest} from "../datasources/zabbix-history.js";
export class RegressionTestExecutor {
public static async runAllRegressionTests(zabbixAuthToken?: string, cookie?: string): Promise<SmoketestResponse> {
@ -530,6 +531,52 @@ export class RegressionTestExecutor {
}
}
// Regression 13: pushHistory mutation
const pushHostName = "REG_PUSH_HOST_" + Math.random().toString(36).substring(7);
const pushItemKey = "trap.json";
// Create host
const pushHostResult = await HostImporter.importHosts([{
deviceKey: pushHostName,
deviceType: "RegressionHost",
groupNames: [hostGroupName],
templateNames: []
}], zabbixAuthToken, cookie);
let pushSuccess = false;
if (pushHostResult?.length && pushHostResult[0].hostid) {
const pushHostId = pushHostResult[0].hostid;
// Add trapper item to host
const pushItemResult = await new ZabbixRequest("item.create", zabbixAuthToken, cookie).executeRequestReturnError(zabbixAPI, new ParsedArgs({
name: "Trapper JSON Item",
key_: pushItemKey,
hostid: pushHostId,
type: 2, // Zabbix trapper
value_type: 4, // Text
history: "1d"
}));
if (!isZabbixErrorResult(pushItemResult)) {
// Push data
const pushRequest = new ZabbixHistoryPushRequest(zabbixAuthToken, cookie);
const pushParams = new ZabbixHistoryPushParams(
[{ timestamp: new Date().toISOString(), value: { hello: "world" } }],
undefined, pushItemKey, pushHostName
);
const pushDataResult = await pushRequest.executeRequestReturnError(zabbixAPI, pushParams);
pushSuccess = !isZabbixErrorResult(pushDataResult) && pushDataResult.response === "success";
}
}
steps.push({
name: "REG-PUSH: pushHistory mutation",
success: pushSuccess,
message: pushSuccess ? "Successfully pushed history data to trapper item" : "Failed to push history data"
});
if (!pushSuccess) success = false;
// Step 1: Create Host Group (Legacy test kept for compatibility)
const groupResult = await HostImporter.importHostGroups([{
groupName: groupName
@ -549,6 +596,7 @@ export class RegressionTestExecutor {
await HostDeleter.deleteHosts(null, metaHostName, zabbixAuthToken, cookie);
await HostDeleter.deleteHosts(null, devHostNameWithTag, zabbixAuthToken, cookie);
await HostDeleter.deleteHosts(null, devHostNameWithoutTag, zabbixAuthToken, cookie);
await HostDeleter.deleteHosts(null, pushHostName, zabbixAuthToken, cookie);
await TemplateDeleter.deleteTemplates(null, regTemplateName, zabbixAuthToken, cookie);
await TemplateDeleter.deleteTemplates(null, httpTempName, zabbixAuthToken, cookie);
await TemplateDeleter.deleteTemplates(null, macroTemplateName, zabbixAuthToken, cookie);

View file

@ -400,6 +400,34 @@ export interface GpsPosition {
longitude?: Maybe<Scalars['Float']['output']>;
}
/** Detailed result for a single pushed value. */
export interface HistoryPushData {
__typename?: 'HistoryPushData';
/** Error information if the push failed for this item. */
error?: Maybe<ApiError>;
/** The Zabbix item ID. */
itemid?: Maybe<Scalars['String']['output']>;
}
/** Input for pushing history data. */
export interface HistoryPushInput {
/** Timestamp of the value. */
timestamp: Scalars['DateTime']['input'];
/** The value to push (JSON object). */
value: Scalars['JSONObject']['input'];
}
/** Response object for pushHistory operation. */
export interface HistoryPushResponse {
__typename?: 'HistoryPushResponse';
/** Detailed results for each pushed value. */
data?: Maybe<Array<HistoryPushData>>;
/** Error information if the operation failed. */
error?: Maybe<ApiError>;
/** Overall status message. */
message?: Maybe<Scalars['String']['output']>;
}
/** Common interface for all host-like entities in Zabbix. */
export interface Host {
/**
@ -576,6 +604,12 @@ export interface Mutation {
* Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
*/
importUserRights?: Maybe<ImportUserRightsResult>;
/**
* Pushes history data to Zabbix (ZABBIX_TRAP items).
*
* Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
*/
pushHistory?: Maybe<HistoryPushResponse>;
/** Runs all regression tests. */
runAllRegressionTests: SmoketestResponse;
/** Runs a smoketest: creates a template, links a host, verifies it, and cleans up. */
@ -642,6 +676,14 @@ export interface MutationImportUserRightsArgs {
}
export interface MutationPushHistoryArgs {
host?: InputMaybe<Scalars['String']['input']>;
itemid?: InputMaybe<Scalars['Int']['input']>;
key?: InputMaybe<Scalars['String']['input']>;
values: Array<HistoryPushInput>;
}
export interface MutationRunSmoketestArgs {
groupName: Scalars['String']['input'];
hostName: Scalars['String']['input'];
@ -1259,6 +1301,9 @@ export type ResolversTypes = {
GenericDeviceState: ResolverTypeWrapper<GenericDeviceState>;
GenericResponse: ResolverTypeWrapper<GenericResponse>;
GpsPosition: ResolverTypeWrapper<ResolversInterfaceTypes<ResolversTypes>['GpsPosition']>;
HistoryPushData: ResolverTypeWrapper<HistoryPushData>;
HistoryPushInput: HistoryPushInput;
HistoryPushResponse: ResolverTypeWrapper<HistoryPushResponse>;
Host: ResolverTypeWrapper<ResolversInterfaceTypes<ResolversTypes>['Host']>;
HostGroup: ResolverTypeWrapper<HostGroup>;
ID: ResolverTypeWrapper<Scalars['ID']['output']>;
@ -1335,6 +1380,9 @@ export type ResolversParentTypes = {
GenericDeviceState: GenericDeviceState;
GenericResponse: GenericResponse;
GpsPosition: ResolversInterfaceTypes<ResolversParentTypes>['GpsPosition'];
HistoryPushData: HistoryPushData;
HistoryPushInput: HistoryPushInput;
HistoryPushResponse: HistoryPushResponse;
Host: ResolversInterfaceTypes<ResolversParentTypes>['Host'];
HostGroup: HostGroup;
ID: Scalars['ID']['output'];
@ -1517,6 +1565,19 @@ export type GpsPositionResolvers<ContextType = any, ParentType extends Resolvers
longitude?: Resolver<Maybe<ResolversTypes['Float']>, ParentType, ContextType>;
};
export type HistoryPushDataResolvers<ContextType = any, ParentType extends ResolversParentTypes['HistoryPushData'] = ResolversParentTypes['HistoryPushData']> = {
error?: Resolver<Maybe<ResolversTypes['ApiError']>, ParentType, ContextType>;
itemid?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
};
export type HistoryPushResponseResolvers<ContextType = any, ParentType extends ResolversParentTypes['HistoryPushResponse'] = ResolversParentTypes['HistoryPushResponse']> = {
data?: Resolver<Maybe<Array<ResolversTypes['HistoryPushData']>>, ParentType, ContextType>;
error?: Resolver<Maybe<ResolversTypes['ApiError']>, ParentType, ContextType>;
message?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
__isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
};
export type HostResolvers<ContextType = any, ParentType extends ResolversParentTypes['Host'] = ResolversParentTypes['Host']> = {
__resolveType: TypeResolveFn<'GenericDevice' | 'ZabbixHost', ParentType, ContextType>;
deviceType?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
@ -1591,6 +1652,7 @@ export type MutationResolvers<ContextType = any, ParentType extends ResolversPar
importTemplateGroups?: Resolver<Maybe<Array<ResolversTypes['CreateTemplateGroupResponse']>>, ParentType, ContextType, RequireFields<MutationImportTemplateGroupsArgs, 'templateGroups'>>;
importTemplates?: Resolver<Maybe<Array<ResolversTypes['ImportTemplateResponse']>>, ParentType, ContextType, RequireFields<MutationImportTemplatesArgs, 'templates'>>;
importUserRights?: Resolver<Maybe<ResolversTypes['ImportUserRightsResult']>, ParentType, ContextType, RequireFields<MutationImportUserRightsArgs, 'dryRun' | 'input'>>;
pushHistory?: Resolver<Maybe<ResolversTypes['HistoryPushResponse']>, ParentType, ContextType, RequireFields<MutationPushHistoryArgs, 'values'>>;
runAllRegressionTests?: Resolver<ResolversTypes['SmoketestResponse'], ParentType, ContextType>;
runSmoketest?: Resolver<ResolversTypes['SmoketestResponse'], ParentType, ContextType, RequireFields<MutationRunSmoketestArgs, 'groupName' | 'hostName' | 'templateName'>>;
};
@ -1786,6 +1848,8 @@ export type Resolvers<ContextType = any> = {
GenericDeviceState?: GenericDeviceStateResolvers<ContextType>;
GenericResponse?: GenericResponseResolvers<ContextType>;
GpsPosition?: GpsPositionResolvers<ContextType>;
HistoryPushData?: HistoryPushDataResolvers<ContextType>;
HistoryPushResponse?: HistoryPushResponseResolvers<ContextType>;
Host?: HostResolvers<ContextType>;
HostGroup?: HostGroupResolvers<ContextType>;
ImportHostResponse?: ImportHostResponseResolvers<ContextType>;

View file

@ -0,0 +1,66 @@
import {ZabbixHistoryPushParams, ZabbixHistoryPushRequest} from "../datasources/zabbix-history.js";
import {zabbixAPI} from "../datasources/zabbix-api.js";
import {GraphQLError} from "graphql";
// Mocking ZabbixAPI
jest.mock("../datasources/zabbix-api.js", () => ({
zabbixAPI: {
post: jest.fn(),
}
}));
describe("ZabbixHistoryPushRequest", () => {
let request: ZabbixHistoryPushRequest;
beforeEach(() => {
jest.clearAllMocks();
request = new ZabbixHistoryPushRequest("token");
});
test("createZabbixParams - transformation", () => {
const values = [
{ timestamp: "2024-01-01T10:00:00Z", value: { key: "value" } },
{ timestamp: "2024-01-01T10:00:01.500Z", value: "simple value" }
];
const params = new ZabbixHistoryPushParams(values, "1", "item.key", "host.name");
const zabbixParams = request.createZabbixParams(params);
expect(zabbixParams).toHaveLength(2);
expect(zabbixParams[0]).toEqual({
itemid: "1",
value: JSON.stringify({ key: "value" }),
clock: 1704103200,
ns: 0
});
expect(zabbixParams[1]).toEqual({
itemid: "1",
value: "simple value",
clock: 1704103201,
ns: 500000000
});
});
test("createZabbixParams - transformation without itemid", () => {
const values = [
{ timestamp: "2024-01-01T10:00:00Z", value: { key: "value" } }
];
const params = new ZabbixHistoryPushParams(values, undefined, "item.key", "host.name");
const zabbixParams = request.createZabbixParams(params);
expect(zabbixParams).toHaveLength(1);
expect(zabbixParams[0]).toEqual({
host: "host.name",
key: "item.key",
value: JSON.stringify({ key: "value" }),
clock: 1704103200,
ns: 0
});
});
test("prepare - throw error if item missing", async () => {
const values = [{ timestamp: "2024-01-01T10:00:00Z", value: "val" }];
const params = new ZabbixHistoryPushParams(values, undefined, undefined, "host.name");
await expect(request.prepare(zabbixAPI, params)).rejects.toThrow("if itemid is empty both key and host must be filled");
});
});

View file

@ -0,0 +1,127 @@
import {ApolloServer} from '@apollo/server';
import {schema_loader} from '../api/schema.js';
import {zabbixAPI} from '../datasources/zabbix-api.js';
// Mocking ZabbixAPI
jest.mock("../datasources/zabbix-api.js", () => ({
zabbixAPI: {
post: jest.fn(),
}
}));
describe("History Push Integration Tests", () => {
let server: ApolloServer;
beforeAll(async () => {
const schema = await schema_loader();
server = new ApolloServer({
schema,
});
});
beforeEach(() => {
jest.clearAllMocks();
});
test("Mutation pushHistory - success with itemid", async () => {
const mutation = `
mutation PushHistory($itemid: Int, $values: [HistoryPushInput!]!) {
pushHistory(itemid: $itemid, values: $values) {
message
data {
itemid
}
}
}
`;
const variables = {
itemid: 1,
values: [
{ timestamp: "2024-01-01T10:00:00Z", value: { foo: "bar" } }
]
};
(zabbixAPI.post as jest.Mock).mockResolvedValueOnce({
response: "success",
data: [{ itemid: "1" }]
});
const response = await server.executeOperation({
query: mutation,
variables: variables,
}, {
contextValue: { zabbixAuthToken: 'test-token', dataSources: { zabbixAPI: zabbixAPI } }
});
expect(response.body.kind).toBe('single');
// @ts-ignore
const result = response.body.singleResult;
expect(result.errors).toBeUndefined();
expect(result.data.pushHistory.data[0].itemid).toBe("1");
expect(zabbixAPI.post).toHaveBeenCalledWith("history.push", expect.objectContaining({
body: expect.objectContaining({
method: "history.push",
params: expect.arrayContaining([
expect.objectContaining({
itemid: "1",
value: JSON.stringify({ foo: "bar" })
})
])
})
}));
});
test("Mutation pushHistory - success with key and host", async () => {
const mutation = `
mutation PushHistory($key: String, $host: String, $values: [HistoryPushInput!]!) {
pushHistory(key: $key, host: $host, values: $values) {
message
data {
itemid
}
}
}
`;
const variables = {
key: "item.key",
host: "host.name",
values: [
{ timestamp: "2024-01-01T10:00:00Z", value: { message: "plain value" } }
]
};
// Mock history.push
(zabbixAPI.post as jest.Mock).mockResolvedValueOnce({
response: "success",
data: [{ itemid: "1" }]
});
const response = await server.executeOperation({
query: mutation,
variables: variables,
}, {
contextValue: { zabbixAuthToken: 'test-token', dataSources: { zabbixAPI: zabbixAPI } }
});
expect(response.body.kind).toBe('single');
// @ts-ignore
const result = response.body.singleResult;
expect(result.errors).toBeUndefined();
expect(result.data.pushHistory.data[0].itemid).toBe("1");
expect(zabbixAPI.post).toHaveBeenCalledWith("history.push", expect.objectContaining({
body: expect.objectContaining({
method: "history.push",
params: expect.arrayContaining([
expect.objectContaining({
host: "host.name",
key: "item.key"
})
])
})
}));
});
});

View file

@ -55,7 +55,7 @@ zabbix_export:
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/location"]'
- uuid: 380c4a7d752848cba3b5a59a0f9b13c0
name: MQTT_LOCATION
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/location"]'
history: '0'
value_type: TEXT
@ -135,23 +135,23 @@ zabbix_export:
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/count"]'
- uuid: 905c5f1b6e524bd2b227769a59f4df1b
name: MQTT_COUNT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/count"]'
history: '0'
value_type: TEXT
- uuid: 6fa441872c3140f4adecf39956245603
name: MQTT_DISTANCE
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/distance"]'
value_type: TEXT
- uuid: 69d2afa4a0324d818150e9473c3264f3
name: MQTT_NAME
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/name"]'
value_type: TEXT
- uuid: 45ff9430d27f47a492c98fce03fc7962
name: MQTT_SERVICE_DATA
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","deviceValue/ServiceData"]'
value_type: TEXT
- uuid: 3bf0d3017ea54e1da2a764c3f96bf97e
@ -224,6 +224,89 @@ zabbix_export:
value: '8'
- tag: deviceWidgetPreview.TOP_LEFT.value_font_size
value: '24'
- uuid: 27474f627cb344b782a81c16d7e0c7d1
template: BT_DEVICE_TRACKER
name: BT_DEVICE_TRACKER
vendor:
name: 'Hilbig IT GmbH'
version: 2.1.1
templates:
- name: ROADWORK_DEVICE
groups:
- name: Templates/Roadwork/Devices
items:
- name: location
type: DEPENDENT
key: location
history: 2d
value_type: TEXT
preprocessing:
- type: JAVASCRIPT
parameters:
- |
var obj=JSON.parse(value);
if (obj["isFiltered"]) {
throw "Result is filtered";
return "filtered";
}
return value;
- type: NOT_MATCHES_REGEX
parameters:
- filtered
error_handler: DISCARD_VALUE
master_item:
key: 'mqtt.trap[deviceValue/location]'
- name: MQTT_LOCATION
type: TRAP
key: 'mqtt.trap[deviceValue/location]'
history: '0'
value_type: TEXT
- name: coords
type: DEPENDENT
key: state.current.values.coords
history: 2d
value_type: TEXT
preprocessing:
- type: JAVASCRIPT
parameters:
- |
var obj=JSON.parse(value);
var location = obj["location"];
var coords = location["coords"];
return JSON.stringify({
"btDeviceKey": obj["btDeviceKey"],
"timestamp": location["timestamp"],
"deviceName": obj["deviceName"],
"latitude": coords[1],
"longitude": coords[0],
"coords": coords
});
master_item:
key: location
tags:
- tag: hasValue
value: 'true'
- name: geojson
type: DEPENDENT
key: state.current.values.geojson
history: 2d
value_type: TEXT
preprocessing:
- type: JSONPATH
parameters:
- $.location.setup
master_item:
key: location
tags:
- tag: hasValue
value: 'true'
tags:
- tag: class
value: roadwork
- tag: deviceType
value: GenericDevice
- uuid: 6490907a74964d0797c7acd1938bc553
template: GEOLOCATION
name: GEOLOCATION
@ -236,7 +319,7 @@ zabbix_export:
items:
- uuid: 4ad4d9a769744615816d190c34cb49c7
name: GPS_LOCATION_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/location"]'
history: '0'
value_type: TEXT
@ -338,25 +421,25 @@ zabbix_export:
items:
- uuid: 602290e9f42f4135b548e1cd45abe135
name: DENSITY_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/density"]'
history: '0'
value_type: TEXT
- uuid: 87e0a14266984247b81fdc757dea5bde
name: ERROR_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/error"]'
history: '0'
value_type: TEXT
- uuid: 644b0ec2e3d9448da1a69561ec10d19d
name: SIGNALSTRENGTH_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/signalstrength"]'
history: '0'
value_type: TEXT
- uuid: 67c01d7334a24823832bba74073cf356
name: TEMPERATURE_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/temperature"]'
history: '0'
value_type: TEXT
@ -373,7 +456,7 @@ zabbix_export:
value: operationalValue
- uuid: 0352c80c749d4d91b386dab9c74ef3c6
name: VOLTAGE_MQTT
type: ZABBIX_AGENT
type: TRAP
key: 'mqtt.get["tcp://mqtt-broker:1883","operationalValue/voltage"]'
history: '0'
value_type: TEXT

View file

@ -0,0 +1,90 @@
zabbix_export:
version: '7.4'
templates:
- uuid: 27474f627cb344b782a81c16d7e0c7d1
template: BT_DEVICE_TRACKER
name: BT_DEVICE_TRACKER
vendor:
name: 'Hilbig IT GmbH'
version: 2.1.1
templates:
- name: ROADWORK_DEVICE
groups:
- name: Templates/Roadwork/Devices
items:
- uuid: d4d3ec9f3ca940a39a721b6cfd2f3471
name: location
type: DEPENDENT
key: location
history: 2d
value_type: TEXT
preprocessing:
- type: JAVASCRIPT
parameters:
- |
var obj=JSON.parse(value);
if (obj["isFiltered"]) {
throw "Result is filtered";
return "filtered";
}
return value;
- type: NOT_MATCHES_REGEX
parameters:
- filtered
error_handler: DISCARD_VALUE
master_item:
key: 'mqtt.trap[deviceValue/location]'
- uuid: 380c4a7d752848cba3b5a59a0f9b13c0
name: MQTT_LOCATION
type: TRAP
key: 'mqtt.trap[deviceValue/location]'
history: '0'
value_type: TEXT
- uuid: 29faf53c033840c0b1405f8240e30312
name: coords
type: DEPENDENT
key: state.current.values.coords
history: 2d
value_type: TEXT
preprocessing:
- type: JAVASCRIPT
parameters:
- |
var obj=JSON.parse(value);
var location = obj["location"];
var coords = location["coords"];
return JSON.stringify({
"btDeviceKey": obj["btDeviceKey"],
"timestamp": location["timestamp"],
"deviceName": obj["deviceName"],
"latitude": coords[1],
"longitude": coords[0],
"coords": coords
});
master_item:
key: location
tags:
- tag: hasValue
value: 'true'
- uuid: 1ae9486c18394e56b114c9cb4546deaf
name: geojson
type: DEPENDENT
key: state.current.values.geojson
history: 2d
value_type: TEXT
preprocessing:
- type: JSONPATH
parameters:
- $.location.setup
master_item:
key: location
tags:
- tag: hasValue
value: 'true'
tags:
- tag: class
value: roadwork
- tag: deviceType
value: bt_device_tracker_generic