feat: implement smoketest and extend host provisioning with template linking
- Add runSmoketest mutation to automate end-to-end verification.
- Add SmoketestExecutor and HostDeleter to support automated testing and cleanup.
- Extend createHost and importHosts to allow linking templates by name or ID.
- Update docs/howtos/cookbook.md with new recipe steps and AI/MCP guidance.
- Update .junie/guidelines.md with new verification and deployment standards.
- Add src/test/template_link.test.ts and update existing tests to cover new functionality.
- Regenerate GraphQL types to match schema updates.
Parent: b56255ffaa
Commit: 67357d0bc3
20 changed files with 690 additions and 50 deletions
.idea/workspace.xml (generated, 20 changes)
@@ -5,17 +5,12 @@
   </component>
   <component name="ChangeListManager">
     <list default="true" id="d7a71994-2699-4ae4-9fd2-ee13b7f33d35" name="Changes" comment="docs: refactor documentation and upgrade to Node.js 24 This commit upgrades the project to Node.js 24 (LTS) and performs a major refactoring of the documentation to support both advanced users and AI-based automation (MCP). Changes: - Environment & CI/CD: - Set Node.js version to >=24 in package.json and .nvmrc. - Updated Dockerfile to use Node 24 base image. - Updated @types/node to ^24.10.9. - Documentation: - Refactored README.md with comprehensive technical reference, configuration details, and Zabbix-to-GraphQL mapping. - Created docs/howtos/cookbook.md with practical recipes for common tasks and AI test generation. - Updated docs/howtos/mcp.md to emphasize GraphQL's advantages for AI agents and Model Context Protocol. - Added readme.improvement.plan.md to track documentation evolution. - Enhanced all how-to guides with improved cross-references and up-to-date information. - Guidelines: - Updated .junie/guidelines.md with Node 24 requirements and enhanced commit message standards (Conventional Commits 1.0.0). - Infrastructure & Code: - Updated docker-compose.yml with Apollo MCP server integration. - Refined configuration and schema handling in src/api/ and src/datasources/. - Synchronized generated TypeScript types with schema updates.">
-      <change afterPath="$PROJECT_DIR$/.ai/mcp/mcp.json" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/roadmap.md" afterDir="false" />
       <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/.junie/guidelines.md" beforeDir="false" afterPath="$PROJECT_DIR$/.junie/guidelines.md" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/schema/mutations.graphql" beforeDir="false" afterPath="$PROJECT_DIR$/schema/mutations.graphql" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/api/resolvers.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/api/resolvers.ts" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/docker-compose.yml" beforeDir="false" afterPath="$PROJECT_DIR$/docker-compose.yml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/datasources/zabbix-templates.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/datasources/zabbix-templates.ts" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/docs/howtos/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/README.md" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/execution/host_importer.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/execution/host_importer.ts" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/docs/howtos/cookbook.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/cookbook.md" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/schema/generated/graphql.ts" beforeDir="false" afterPath="$PROJECT_DIR$/src/schema/generated/graphql.ts" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/docs/howtos/mcp.md" beforeDir="false" afterPath="$PROJECT_DIR$/docs/howtos/mcp.md" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/mcp-config.yaml" beforeDir="false" afterPath="$PROJECT_DIR$/mcp-config.yaml" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/package.json" beforeDir="false" afterPath="$PROJECT_DIR$/package.json" afterDir="false" />
     </list>
     <option name="SHOW_DIALOG" value="false" />
     <option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -26,7 +21,7 @@
     <execution />
   </component>
   <component name="EmbeddingIndexingInfo">
-    <option name="cachedIndexableFilesCount" value="111" />
+    <option name="cachedIndexableFilesCount" value="138" />
     <option name="fileBasedEmbeddingIndicesEnabled" value="true" />
   </component>
   <component name="Git.Settings">
@@ -215,7 +210,8 @@
       <workItem from="1769699975260" duration="75000" />
       <workItem from="1769700092648" duration="5212000" />
       <workItem from="1769724930397" duration="16056000" />
-      <workItem from="1769789496322" duration="5496000" />
+      <workItem from="1769789496322" duration="14281000" />
+      <workItem from="1769849767328" duration="4117000" />
     </task>
     <task id="LOCAL-00001" summary="chore: Update IntelliJ workspace settings and add GitHub Actions workflow for Docker deployment">
       <option name="closed" value="true" />
@@ -28,14 +28,15 @@ The [Roadmap](../roadmap.md) is to be considered as outlook giving constraints o
 ## Common Scripts
 - `npm run start`: Launches the development server with `tsx` and `nodemon` for hot-reloading.
 - `npm run test`: Executes the Jest test suite.
-- `npm run codegen`: Generates TypeScript types based on the GraphQL schema definitions.
+- `npm run codegen`: Starts GraphQL Codegen in watch mode (for continuous development).
+- `npx graphql-codegen --config codegen.ts`: Generates TypeScript types once (use this for one-off updates).
 - `npm run compile`: Compiles TypeScript source files into the `dist/` directory.
 - `npm run prod`: Prepares the schema and runs the compiled production build.
 
 ## Best Practices & Standards
 - **ESM & Imports**: The project uses ECMAScript Modules (ESM). Always use the `.js` extension when importing local files (e.g. `import { Config } from "../common_utils.js";`), even though the source files are `.ts`.
 - **Configuration**: Always use the `Config` class to access environment variables. Avoid direct `process.env` calls.
-- **Type Safety**: Leverage types generated via `npm run codegen` for resolvers and data handling to ensure consistency with the schema.
+- **Type Safety**: Leverage types generated via `npx graphql-codegen --config codegen.ts` (or `npm run codegen` for watch mode) for resolvers and data handling to ensure consistency with the schema.
 - **Import Optimization**:
   - Always optimize imports before committing.
   - Project setting `OPTIMIZE_IMPORTS_BEFORE_PROJECT_COMMIT` is enabled.
@@ -44,6 +45,14 @@ The [Roadmap](../roadmap.md) is to be considered as outlook giving constraints o
 - **Testing**: Write reproduction tests for bugs and cover new features with both unit and integration tests in `src/test/`.
 - **Grammar & Style**: Avoid using a comma after "e.g." or "i.e." (e.g. use "e.g. example" instead of "e.g., example").
 
+## Verification & Deployment
+- **Pre-commit Verification**: Always add a verification stage to your plan before committing.
+  - *Action*: Run the `Smoketest` tool using MCP to ensure basic functionality is intact.
+  - *Action*: Monitor the API logs for errors after each service restart.
+- **Environment Restart**: Always include a step to rebuild and restart the API and MCP server as a final check.
+  - *Command*: `docker compose up -d --build`
+  - *Requirement*: Ask the user if everything looks okay before executing the restart, and offer the option to skip this step.
+
 ### Documentation Style
 - **Bullet Points**: Use bullet points instead of enumerations for lists to maintain consistency across all documentation.
 - **Visual Style**: Use icons in headers and bold subjects for primary list items (e.g. `- **Feature**: Description`) to match the `README.md` style.
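Taken together, the new guideline entries describe a pre-commit loop of regenerating types, running the tests, and rebuilding the containers. A minimal shell sketch of that loop, using only the commands named in the diff above (the script itself and its comments are illustrative, not part of the repository):

```bash
#!/usr/bin/env bash
# Illustrative pre-commit verification sketch assembled from the documented commands.
set -euo pipefail

# One-off type generation to keep resolvers in sync with the schema.
npx graphql-codegen --config codegen.ts

# Run the Jest test suite.
npm run test

# Rebuild and restart the API and MCP server as a final check
# (the guidelines ask the user for confirmation before this step).
docker compose up -d --build
```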
@@ -140,14 +140,15 @@ Execute the `importTemplates` mutation to create the template and items automati
 Verify that the new type is available and correctly mapped by creating a test host and querying it.
 
 #### 1. Create a Test Host
-Use the `importHosts` mutation (or `createHost` if IDs are already known) to create a host and explicitly set its `deviceType` to `DistanceTrackerDevice`.
+Use the `importHosts` mutation (or `createHost` if IDs are already known) to create a host. Set its `deviceType` to `DistanceTrackerDevice` and link it to the `DISTANCE_TRACKER` template (created in Step 3) using the `templateNames` parameter.
 
 ```graphql
-mutation CreateTestDistanceTracker($host: String!, $groupNames: [String!]!) {
+mutation CreateTestDistanceTracker($host: String!, $groupNames: [String!]!, $templateNames: [String]) {
   importHosts(hosts: [{
     deviceKey: $host,
     deviceType: "DistanceTrackerDevice",
-    groupNames: $groupNames
+    groupNames: $groupNames,
+    templateNames: $templateNames
   }]) {
     hostid
     message
@@ -209,8 +210,8 @@ For more details on the input fields, see the [Reference: createHost](../../sche
 AI agents should prefer using the `importHosts` MCP tool for provisioning as it allows using names for host groups instead of IDs.
 
 ```graphql
-mutation CreateNewHost($host: String!, $groups: [Int!]!, $templates: [Int!]!) {
-  createHost(host: $host, hostgroupids: $groups, templateids: $templates) {
+mutation CreateNewHost($host: String!, $groups: [Int!]!, $templates: [Int], $templateNames: [String]) {
+  createHost(host: $host, hostgroupids: $groups, templateids: $templates, templateNames: $templateNames) {
     hostids
     error {
       message
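For illustration, a variables object for the updated `CreateTestDistanceTracker` mutation could look like the following sketch (the host key and group name are placeholder values; only `DISTANCE_TRACKER` comes from the recipe itself):

```json
{
  "host": "distance-tracker-test-01",
  "groupNames": ["ConstructionSite/Test"],
  "templateNames": ["DISTANCE_TRACKER"]
}
```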
@@ -12,8 +12,14 @@ The project uses [GraphQL Codegen](https://the-guild.dev/graphql/codegen) to gen
 - **Generated Output**: `src/schema/generated/graphql.ts`
 
 #### How to Regenerate Types
-Whenever you modify any `.graphql` files in the `schema/` directory, you must regenerate the TypeScript types:
+Whenever you modify any `.graphql` files in the `schema/` directory, you must regenerate the TypeScript types.
 
+For a one-off update (e.g. in a script or before commit):
+```bash
+npx graphql-codegen --config codegen.ts
+```
+
+If you are a developer and want to watch for schema changes continuously:
 ```bash
 npm run codegen
 ```
@@ -1,5 +1,5 @@
-mutation CreateHost($host: String!, $hostgroupids: [Int!]!, $templateids: [Int!]!) {
-  createHost(host: $host, hostgroupids: $hostgroupids, templateids: $templateids) {
+mutation CreateHost($host: String!, $hostgroupids: [Int!]!, $templateids: [Int], $templateNames: [String]) {
+  createHost(host: $host, hostgroupids: $hostgroupids, templateids: $templateids, templateNames: $templateNames) {
     hostids
     error {
       message
@@ -1,8 +1,9 @@
-mutation CreateVerificationHost($deviceKey: String!, $deviceType: String!, $groupNames: [String!]!) {
+mutation CreateVerificationHost($deviceKey: String!, $deviceType: String!, $groupNames: [String!]!, $templateNames: [String]) {
   importHosts(hosts: [{
     deviceKey: $deviceKey,
     deviceType: $deviceType,
-    groupNames: $groupNames
+    groupNames: $groupNames,
+    templateNames: $templateNames
   }]) {
     hostid
     message
@@ -1,5 +1,6 @@
 # Import multiple hosts/devices into Zabbix.
 # This is a powerful tool for bulk provisioning of hosts using their names and types.
+# It supports linking templates by ID (templateids) or by name (templateNames).
 mutation ImportHosts($hosts: [CreateHost!]!) {
   importHosts(hosts: $hosts) {
     hostid
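As a quick illustration of the new comment, a `hosts` variables object that mixes both linking styles could look like this sketch (device key, group name, and the concrete ID/template values are placeholders, not values defined by the project):

```json
{
  "hosts": [{
    "deviceKey": "edge-device-01",
    "deviceType": "DistanceTrackerDevice",
    "groupNames": ["ConstructionSite/Test"],
    "templateids": [101],
    "templateNames": ["DISTANCE_TRACKER"]
  }]
}
```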
mcp/operations/runSmoketest.graphql (new file, 14 lines)
# Run a complete smoketest: creates a template, host group, and host,
# verifies their creation and linkage, and then cleans up everything.
# Variables: hostName, templateName, groupName
mutation RunSmoketest($hostName: String!, $templateName: String!, $groupName: String!) {
  runSmoketest(hostName: $hostName, templateName: $templateName, groupName: $groupName) {
    success
    message
    steps {
      name
      success
      message
    }
  }
}
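A possible set of variables for this operation, using throwaway names so the cleanup phase can remove everything it created (the concrete values are illustrative only):

```json
{
  "hostName": "smoketest-host",
  "templateName": "smoketest-template",
  "groupName": "smoketest-group"
}
```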
@@ -11,7 +11,9 @@ type Mutation {
     """List of host group IDs to assign the host to."""
     hostgroupids:[Int!]!,
     """List of template IDs to link to the host."""
-    templateids: [Int!]!,
+    templateids: [Int],
+    """List of template names to link to the host."""
+    templateNames: [String],
     """Optional location information for the host inventory."""
     location: LocationInput
   ): CreateHostResponse
@@ -100,6 +102,78 @@ type Mutation {
     """Wildcard name pattern for template groups to delete."""
     name_pattern: String
   ): [DeleteResponse!]
+
+  """
+  Delete hosts by their IDs or by a name pattern.
+
+  Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
+  """
+  deleteHosts(
+    """List of host IDs to delete."""
+    hostids: [Int!],
+    """Wildcard name pattern for hosts to delete."""
+    name_pattern: String
+  ): [DeleteResponse!]
+
+  """
+  Delete host groups by their IDs or by a name pattern.
+
+  Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
+  """
+  deleteHostGroups(
+    """List of host group IDs to delete."""
+    groupids: [Int!],
+    """Wildcard name pattern for host groups to delete."""
+    name_pattern: String
+  ): [DeleteResponse!]
+
+  """
+  Runs a smoketest: creates a template, links a host, verifies it, and cleans up.
+  """
+  runSmoketest(
+    """Technical name for the smoketest host."""
+    hostName: String!,
+    """Technical name for the smoketest template."""
+    templateName: String!,
+    """Technical name for the smoketest host group."""
+    groupName: String!
+  ): SmoketestResponse!
+}
+
+"""
+Response object for the smoketest operation.
+"""
+type SmoketestResponse {
+  """
+  True if all steps of the smoketest succeeded.
+  """
+  success: Boolean!
+  """
+  Overall status message.
+  """
+  message: String
+  """
+  Detailed results for each step.
+  """
+  steps: [SmoketestStep!]!
+}
+
+"""
+Results for a single step in the smoketest.
+"""
+type SmoketestStep {
+  """
+  Name of the step (e.g. 'Create Template').
+  """
+  name: String!
+  """
+  True if the step succeeded.
+  """
+  success: Boolean!
+  """
+  Status message or error message for the step.
+  """
+  message: String
 }
 
 ####################################################################
@@ -413,6 +487,14 @@ input CreateHost {
   """
   groupids: [Int]
   """
+  List of template IDs to link to the host.
+  """
+  templateids: [Int]
+  """
+  List of template names to link to the host.
+  """
+  templateNames: [String]
+  """
   Location information for the host.
   """
   location: LocationInput
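Because the new delete mutations accept either explicit IDs or a wildcard name pattern, a cleanup call for smoketest leftovers could look like the following sketch (the selection uses the `id` and `message` fields that the HostDeleter populates on `DeleteResponse`; the pattern value is illustrative):

```graphql
mutation CleanupSmoketestHosts($pattern: String) {
  deleteHosts(name_pattern: $pattern) {
    id
    message
  }
}
```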
@@ -25,6 +25,8 @@ import {
 } from "../schema/generated/graphql.js";
 
 import {HostImporter} from "../execution/host_importer.js";
+import {HostDeleter} from "../execution/host_deleter.js";
+import {SmoketestExecutor} from "../execution/smoketest_executor.js";
 import {TemplateImporter} from "../execution/template_importer.js";
 import {TemplateDeleter} from "../execution/template_deleter.js";
 import {HostValueExporter} from "../execution/host_exporter.js";
@@ -48,7 +50,11 @@ import {
     ZabbixImportUserRolesRequest,
     ZabbixQueryUserRolesRequest
 } from "../datasources/zabbix-userroles.js";
-import {ZabbixQueryTemplateGroupRequest, ZabbixQueryTemplatesRequest} from "../datasources/zabbix-templates.js";
+import {
+    TemplateHelper,
+    ZabbixQueryTemplateGroupRequest,
+    ZabbixQueryTemplatesRequest
+} from "../datasources/zabbix-templates.js";
 import {zabbixAPI} from "../datasources/zabbix-api.js";
 import {GraphQLInterfaceType, GraphQLList} from "graphql/type/index.js";
 import {isDevice} from "./resolver_helpers.js";
@@ -182,6 +188,17 @@ export function createResolvers(): Resolvers {
             zabbixAuthToken,
             cookie
         }: any) => {
+            if (args.templateNames?.length) {
+                const templateidsByName = await TemplateHelper.findTemplateIdsByName(args.templateNames as string[], zabbixAPI, zabbixAuthToken, cookie);
+                if (!templateidsByName) {
+                    return {
+                        error: {
+                            message: `Unable to find templates: ${args.templateNames}`
+                        }
+                    }
+                }
+                args.templateids = (args.templateids || []).concat(templateidsByName);
+            }
             return await new ZabbixCreateHostRequest(zabbixAuthToken, cookie).executeRequestThrowError(
                 zabbixAPI,
                 new ParsedArgs(args)
@@ -241,6 +258,24 @@ export function createResolvers(): Resolvers {
             cookie
         }: any) => {
             return TemplateDeleter.deleteTemplateGroups(args.groupids, args.name_pattern, zabbixAuthToken, cookie)
+        },
+        deleteHosts: async (_parent: any, args: any, {
+            zabbixAuthToken,
+            cookie
+        }: any) => {
+            return HostDeleter.deleteHosts(args.hostids, args.name_pattern, zabbixAuthToken, cookie)
+        },
+        deleteHostGroups: async (_parent: any, args: any, {
+            zabbixAuthToken,
+            cookie
+        }: any) => {
+            return HostDeleter.deleteHostGroups(args.groupids, args.name_pattern, zabbixAuthToken, cookie)
+        },
+        runSmoketest: async (_parent: any, args: any, {
+            zabbixAuthToken,
+            cookie
+        }: any) => {
+            return SmoketestExecutor.runSmoketest(args.hostName, args.templateName, args.groupName, zabbixAuthToken, cookie)
         }
     },
@@ -69,6 +69,11 @@ export class ZabbixQueryHostgroupsRequest extends ZabbixRequestWithPermissions<Z
 
 }
 
+export class ZabbixDeleteHostGroupsRequest extends ZabbixRequestWithPermissions<{ groupids: string[] }> {
+    constructor(authToken?: string | null, cookie?: string | null) {
+        super("hostgroup.delete", authToken, cookie, hostGroupReadWritePermissions);
+    }
+}
 
 export class GroupHelper {
     public static groupFullName(groupName: string) {
@@ -228,3 +228,9 @@ export class ZabbixCreateHostRequest extends ZabbixRequest<CreateHostResponse> {
         return args?.zabbix_params || {};
     }
 }
+
+export class ZabbixDeleteHostsRequest extends ZabbixRequest<{ hostids: string[] }> {
+    constructor(authToken?: string | null, cookie?: string | null) {
+        super("host.delete", authToken, cookie);
+    }
+}
@@ -1,4 +1,6 @@
-import {ZabbixRequest} from "./zabbix-request.js";
+import {ZabbixRequest, ParsedArgs, isZabbixErrorResult} from "./zabbix-request.js";
+import {ZabbixAPI} from "./zabbix-api.js";
+import {logger} from "../logging/logger.js";
 
 
 export interface ZabbixQueryTemplateResponse {
@@ -65,3 +67,22 @@ export class ZabbixDeleteTemplateGroupsRequest extends ZabbixRequest<{ groupids:
 }
 
 
+export class TemplateHelper {
+    public static async findTemplateIdsByName(templateNames: string[], zabbixApi: ZabbixAPI, zabbixAuthToken?: string, cookie?: string) {
+        let result: number[] = []
+        for (let templateName of templateNames) {
+            let templates = await new ZabbixQueryTemplatesRequest(zabbixAuthToken, cookie).executeRequestReturnError(zabbixApi, new ParsedArgs({
+                filter_name: templateName
+            }))
+
+            if (isZabbixErrorResult(templates) || !templates?.length) {
+                logger.error(`Unable to find templateName=${templateName}`)
+                return null
+            }
+            result.push(...templates.map((t) => Number(t.templateid)))
+        }
+        return result
+    }
+}
src/execution/host_deleter.ts (new file, 110 lines)
import {DeleteResponse} from "../schema/generated/graphql.js";
import {
    ZabbixDeleteHostsRequest,
    ZabbixQueryHostsGenericRequest,
} from "../datasources/zabbix-hosts.js";
import {
    ZabbixDeleteHostGroupsRequest,
    ZabbixQueryHostgroupsRequest,
    ZabbixQueryHostgroupsParams,
    GroupHelper
} from "../datasources/zabbix-hostgroups.js";
import {isZabbixErrorResult, ParsedArgs} from "../datasources/zabbix-request.js";
import {zabbixAPI} from "../datasources/zabbix-api.js";

export class HostDeleter {

    public static async deleteHosts(hostids: number[] | null | undefined, name_pattern?: string | null, zabbixAuthToken?: string, cookie?: string): Promise<DeleteResponse[]> {
        const result: DeleteResponse[] = [];
        let idsToDelete = hostids ? [...hostids] : [];

        if (name_pattern) {
            const queryResult = await new ZabbixQueryHostsGenericRequest("host.get", zabbixAuthToken, cookie)
                .executeRequestReturnError(zabbixAPI, new ParsedArgs({ name_pattern: name_pattern }));

            if (!isZabbixErrorResult(queryResult) && Array.isArray(queryResult)) {
                const foundIds = queryResult.map((t: any) => Number(t.hostid));
                // Merge and deduplicate
                idsToDelete = Array.from(new Set([...idsToDelete, ...foundIds]));
            }
        }

        if (idsToDelete.length === 0) {
            return [];
        }

        const deleteResult = await new ZabbixDeleteHostsRequest(zabbixAuthToken, cookie)
            .executeRequestReturnError(zabbixAPI, new ParsedArgs(idsToDelete));

        if (isZabbixErrorResult(deleteResult)) {
            let errorMessage = deleteResult.error.message;
            if (deleteResult.error.data) {
                errorMessage += " " + (typeof deleteResult.error.data === 'string' ? deleteResult.error.data : JSON.stringify(deleteResult.error.data));
            }
            for (const id of idsToDelete) {
                result.push({
                    id: id,
                    message: errorMessage,
                    error: deleteResult.error
                });
            }
        } else if (deleteResult?.hostids) {
            for (const id of idsToDelete) {
                result.push({
                    id: id,
                    message: `Host ${id} deleted successfully`
                });
            }
        }

        return result;
    }

    public static async deleteHostGroups(groupids: number[] | null | undefined, name_pattern?: string | null, zabbixAuthToken?: string, cookie?: string): Promise<DeleteResponse[]> {
        const result: DeleteResponse[] = [];
        let idsToDelete = groupids ? [...groupids] : [];

        if (name_pattern) {
            const queryResult = await new ZabbixQueryHostgroupsRequest(zabbixAuthToken, cookie)
                .executeRequestReturnError(zabbixAPI, new ZabbixQueryHostgroupsParams({
                    filter_name: GroupHelper.groupFullName(name_pattern)
                }));

            if (!isZabbixErrorResult(queryResult) && Array.isArray(queryResult)) {
                const foundIds = queryResult.map(g => Number(g.groupid));
                // Merge and deduplicate
                idsToDelete = Array.from(new Set([...idsToDelete, ...foundIds]));
            }
        }

        if (idsToDelete.length === 0) {
            return [];
        }

        const deleteResult = await new ZabbixDeleteHostGroupsRequest(zabbixAuthToken, cookie)
            .executeRequestReturnError(zabbixAPI, new ParsedArgs(idsToDelete));

        if (isZabbixErrorResult(deleteResult)) {
            let errorMessage = deleteResult.error.message;
            if (deleteResult.error.data) {
                errorMessage += " " + (typeof deleteResult.error.data === 'string' ? deleteResult.error.data : JSON.stringify(deleteResult.error.data));
            }
            for (const id of idsToDelete) {
                result.push({
                    id: id,
                    message: errorMessage,
                    error: deleteResult.error
                });
            }
        } else if (deleteResult?.groupids) {
            for (const id of idsToDelete) {
                result.push({
                    id: id,
                    message: `Host group ${id} deleted successfully`
                });
            }
        }

        return result;
    }
}
@@ -6,7 +6,8 @@ import {
     InputMaybe
 } from "../schema/generated/graphql.js";
 import {logger} from "../logging/logger.js";
-import {ZabbixQueryTemplatesRequest} from "../datasources/zabbix-templates.js";
+import {ZabbixCreateHostRequest} from "../datasources/zabbix-hosts.js";
+import {ZabbixQueryTemplatesRequest, TemplateHelper} from "../datasources/zabbix-templates.js";
 import {isZabbixErrorResult, ParsedArgs, ZabbixErrorResult} from "../datasources/zabbix-request.js";
 import {CreateHostGroupResult, GroupHelper, ZabbixCreateHostGroupRequest} from "../datasources/zabbix-hostgroups.js";
 import {ZABBIX_EDGE_DEVICE_BASE_GROUP, zabbixAPI} from "../datasources/zabbix-api.js";
@@ -110,32 +111,49 @@ export class HostImporter {
                     break
                 }
             }
-            let deviceImportResult: {
-                hostids?: string[];
-                error?: any;
-            } = await zabbixAPI.requestByPath("host.create", new ParsedArgs(
+            let templateids = device.templateids ? [...device.templateids as number[]] : [];
+            if (device.templateNames?.length) {
+                const resolvedTemplateids = await TemplateHelper.findTemplateIdsByName(device.templateNames as string[], zabbixAPI, zabbixAuthToken, cookie);
+                if (resolvedTemplateids) {
+                    templateids.push(...resolvedTemplateids);
+                } else {
+                    result.push({
+                        deviceKey: device.deviceKey,
+                        message: `Unable to find templates: ${device.templateNames}`
+                    });
+                    continue;
+                }
+            }
+
+            if (templateids.length === 0) {
+                const defaultTemplateId = await HostImporter.getTemplateIdForDeviceType(device.deviceType, zabbixAuthToken, cookie);
+                if (defaultTemplateId) {
+                    templateids.push(defaultTemplateId);
+                }
+            }
+
+            let deviceImportResult = await new ZabbixCreateHostRequest(zabbixAuthToken, cookie).executeRequestReturnError(zabbixAPI, new ParsedArgs(
                 {
                     host: device.deviceKey,
                     name: device.name,
                     location: device.location,
-                    templateids: [
-                        await HostImporter.getTemplateIdForDeviceType(
-                            device.deviceType, zabbixAuthToken, cookie)],
+                    templateids: templateids,
                     hostgroupids: groupids
                 }
-            ), zabbixAuthToken, cookie)
-            if (deviceImportResult?.hostids?.length) {
-                result.push({
-                    deviceKey: device.deviceKey,
-                    hostid: deviceImportResult.hostids[0],
-                })
-            } else {
+            ))
+            if (isZabbixErrorResult(deviceImportResult)) {
                 result.push({
                     deviceKey: device.deviceKey,
                     message: `Unable to import deviceKey=${device.deviceKey}: ${deviceImportResult.error.message}`,
                     error: deviceImportResult.error
                 })
+            } else {
+                result.push({
+                    deviceKey: device.deviceKey,
+                    hostid: deviceImportResult.hostids![0]?.toString(),
+                })
             }
 
         }
src/execution/smoketest_executor.ts (new file, 158 lines)
import {SmoketestResponse, SmoketestStep} from "../schema/generated/graphql.js";
import {TemplateImporter} from "./template_importer.js";
import {HostImporter} from "./host_importer.js";
import {HostDeleter} from "./host_deleter.js";
import {TemplateDeleter} from "./template_deleter.js";
import {zabbixAPI} from "../datasources/zabbix-api.js";
import {ZabbixQueryHostsGenericRequest} from "../datasources/zabbix-hosts.js";
import {ParsedArgs} from "../datasources/zabbix-request.js";

export class SmoketestExecutor {
    public static async runSmoketest(hostName: string, templateName: string, groupName: string, zabbixAuthToken?: string, cookie?: string): Promise<SmoketestResponse> {
        const steps: SmoketestStep[] = [];
        let success = true;

        try {
            // Step 0: Create Template Group
            const templateGroupResult = await TemplateImporter.importTemplateGroups([{
                groupName: groupName
            }], zabbixAuthToken, cookie);
            const templateGroupSuccess = !!templateGroupResult?.length && !templateGroupResult[0].error;
            steps.push({
                name: "Create Template Group",
                success: templateGroupSuccess,
                message: templateGroupSuccess ? `Template group ${groupName} created` : `Failed: ${templateGroupResult?.[0]?.error?.message || "Unknown error"}`
            });
            if (!templateGroupSuccess) success = false;

            // Step 1: Create Template
            if (success) {
                const templateResult = await TemplateImporter.importTemplates([{
                    host: templateName,
                    name: templateName,
                    groupNames: [groupName]
                }], zabbixAuthToken, cookie);

                const templateSuccess = !!templateResult?.length && !templateResult[0].error;
                steps.push({
                    name: "Create Template",
                    success: templateSuccess,
                    message: templateSuccess ? `Template ${templateName} created` : `Failed: ${templateResult?.[0]?.error?.message || "Unknown error"}`
                });
                if (!templateSuccess) success = false;
            } else {
                steps.push({ name: "Create Template", success: false, message: "Skipped due to previous failures" });
            }

            // Step 2: Create Host Group
            const groupResult = await HostImporter.importHostGroups([{
                groupName: groupName
            }], zabbixAuthToken, cookie);

            const groupSuccess = !!groupResult?.length && !groupResult[0].error;
            steps.push({
                name: "Create Host Group",
                success: groupSuccess,
                message: groupSuccess ? `Host group ${groupName} created` : `Failed: ${groupResult?.[0]?.error?.message || "Unknown error"}`
            });
            if (!groupSuccess) success = false;

            // Step 3: Create Host and Link to Template
            if (success) {
                const hostResult = await HostImporter.importHosts([{
                    deviceKey: hostName,
                    deviceType: "ZabbixHost",
                    groupNames: [groupName],
                    templateNames: [templateName]
                }], zabbixAuthToken, cookie);

                const hostSuccess = !!hostResult?.length && !hostResult[0].error;
                steps.push({
                    name: "Create and Link Host",
                    success: hostSuccess,
                    message: hostSuccess ? `Host ${hostName} created and linked to ${templateName}` : `Failed: ${hostResult?.[0]?.error?.message || "Unknown error"}`
                });
                if (!hostSuccess) success = false;
            } else {
                steps.push({ name: "Create and Link Host", success: false, message: "Skipped due to previous failures" });
            }

            // Step 4: Verify Host Linkage
            if (success) {
                const verifyResult = await new ZabbixQueryHostsGenericRequest("host.get", zabbixAuthToken, cookie)
                    .executeRequestReturnError(zabbixAPI, new ParsedArgs({
                        filter_host: hostName,
                        selectParentTemplates: ["name"]
                    }));

                let verified = false;
                if (Array.isArray(verifyResult) && verifyResult.length > 0) {
                    const host = verifyResult[0] as any;
                    const linkedTemplates = host.parentTemplates || [];
                    verified = linkedTemplates.some((t: any) => t.name === templateName);
                }

                steps.push({
                    name: "Verify Host Linkage",
                    success: verified,
                    message: verified ? `Verification successful: Host ${hostName} is linked to ${templateName}` : `Verification failed: Host or linkage not found`
                });
                if (!verified) success = false;
            } else {
                steps.push({ name: "Verify Host Linkage", success: false, message: "Skipped due to previous failures" });
            }

        } catch (error: any) {
            success = false;
            steps.push({
                name: "Execution Error",
                success: false,
                message: error.message || String(error)
            });
        } finally {
            // Step 5: Cleanup
            const cleanupSteps: SmoketestStep[] = [];

            // Delete Host
            const deleteHostRes = await HostDeleter.deleteHosts(null, hostName, zabbixAuthToken, cookie);
            cleanupSteps.push({
                name: "Cleanup: Delete Host",
                success: deleteHostRes.every(r => !r.error),
                message: deleteHostRes.length > 0 ? deleteHostRes[0].message : "Host not found for deletion"
            });

            // Delete Template
            const deleteTemplateRes = await TemplateDeleter.deleteTemplates(null, templateName, zabbixAuthToken, cookie);
            cleanupSteps.push({
                name: "Cleanup: Delete Template",
                success: deleteTemplateRes.every(r => !r.error),
                message: deleteTemplateRes.length > 0 ? deleteTemplateRes[0].message : "Template not found for deletion"
            });

            // Delete Host Group
            const deleteGroupRes = await HostDeleter.deleteHostGroups(null, groupName, zabbixAuthToken, cookie);
            cleanupSteps.push({
                name: "Cleanup: Delete Host Group",
                success: deleteGroupRes.every(r => !r.error),
                message: deleteGroupRes.length > 0 ? deleteGroupRes[0].message : "Host group not found for deletion"
            });

            // We also need to delete the template group if it's different or just try to delete it
            // In our setup, TemplateImporter creates it if it doesn't exist.
            const deleteTemplateGroupRes = await TemplateDeleter.deleteTemplateGroups(null, groupName, zabbixAuthToken, cookie);
            cleanupSteps.push({
                name: "Cleanup: Delete Template Group",
                success: deleteTemplateGroupRes.every(r => !r.error),
                message: deleteTemplateGroupRes.length > 0 ? deleteTemplateGroupRes[0].message : "Template group not found for deletion"
            });

            steps.push(...cleanupSteps);
        }

        return {
            success,
            message: success ? "Smoketest passed successfully" : "Smoketest failed",
            steps
        };
    }
}
@@ -54,6 +54,10 @@ export interface CreateHost {
   location?: InputMaybe<LocationInput>;
   /** Optional display name of the device (must be unique if provided - default is to set display name to deviceKey). */
   name?: InputMaybe<Scalars['String']['input']>;
+  /** List of template names to link to the host. */
+  templateNames?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
+  /** List of template IDs to link to the host. */
+  templateids?: InputMaybe<Array<InputMaybe<Scalars['Int']['input']>>>;
 }
 
 /** Input for creating or identifying a host group. */
@@ -478,6 +482,18 @@ export interface Mutation {
   * Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
   */
   createHost?: Maybe<CreateHostResponse>;
+  /**
+   * Delete host groups by their IDs or by a name pattern.
+   *
+   * Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
+   */
+  deleteHostGroups?: Maybe<Array<DeleteResponse>>;
+  /**
+   * Delete hosts by their IDs or by a name pattern.
+   *
+   * Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
+   */
+  deleteHosts?: Maybe<Array<DeleteResponse>>;
   /**
   * Delete template groups by their IDs or by a name pattern.
   *
@@ -528,6 +544,8 @@ export interface Mutation {
   * Authentication: Requires `zbx_session` cookie or `zabbix-auth-token` header.
   */
   importUserRights?: Maybe<ImportUserRightsResult>;
+  /** Runs a smoketest: creates a template, links a host, verifies it, and cleans up. */
+  runSmoketest: SmoketestResponse;
 }
 
 
@@ -535,7 +553,20 @@ export interface MutationCreateHostArgs {
   host: Scalars['String']['input'];
   hostgroupids: Array<Scalars['Int']['input']>;
   location?: InputMaybe<LocationInput>;
-  templateids: Array<Scalars['Int']['input']>;
+  templateNames?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
+  templateids?: InputMaybe<Array<InputMaybe<Scalars['Int']['input']>>>;
+}
+
+
+export interface MutationDeleteHostGroupsArgs {
+  groupids?: InputMaybe<Array<Scalars['Int']['input']>>;
+  name_pattern?: InputMaybe<Scalars['String']['input']>;
+}
+
+
+export interface MutationDeleteHostsArgs {
+  hostids?: InputMaybe<Array<Scalars['Int']['input']>>;
+  name_pattern?: InputMaybe<Scalars['String']['input']>;
 }
 
 
@@ -576,6 +607,13 @@ export interface MutationImportUserRightsArgs {
   input: UserRightsInput;
 }
 
 
+export interface MutationRunSmoketestArgs {
+  groupName: Scalars['String']['input'];
+  hostName: Scalars['String']['input'];
+  templateName: Scalars['String']['input'];
+}
+
+
 /** Operational data common to most devices. */
 export interface OperationalDeviceData {
   __typename?: 'OperationalDeviceData';
@@ -744,6 +782,28 @@ export interface QueryUserPermissionsArgs {
   objectNames?: InputMaybe<Array<Scalars['String']['input']>>;
 }
 
+/** Response object for the smoketest operation. */
+export interface SmoketestResponse {
+  __typename?: 'SmoketestResponse';
+  /** Overall status message. */
+  message?: Maybe<Scalars['String']['output']>;
+  /** Detailed results for each step. */
+  steps: Array<SmoketestStep>;
+  /** True if all steps of the smoketest succeeded. */
+  success: Scalars['Boolean']['output'];
+}
+
+/** Results for a single step in the smoketest. */
+export interface SmoketestStep {
+  __typename?: 'SmoketestStep';
+  /** Status message or error message for the step. */
+  message?: Maybe<Scalars['String']['output']>;
+  /** Name of the step (e.g. 'Create Template'). */
+  name: Scalars['String']['output'];
+  /** True if the step succeeded. */
+  success: Scalars['Boolean']['output'];
+}
+
 export enum SortOrder {
   /** Deliver values in ascending order */
   Asc = 'asc',
@@ -1155,6 +1215,8 @@ export type ResolversTypes = {
   Permission: Permission;
   PermissionRequest: PermissionRequest;
   Query: ResolverTypeWrapper<{}>;
+  SmoketestResponse: ResolverTypeWrapper<SmoketestResponse>;
+  SmoketestStep: ResolverTypeWrapper<SmoketestStep>;
   SortOrder: SortOrder;
   StorageItemType: StorageItemType;
   String: ResolverTypeWrapper<Scalars['String']['output']>;
@@ -1227,6 +1289,8 @@ export type ResolversParentTypes = {
   OperationalDeviceData: OperationalDeviceData;
   PermissionRequest: PermissionRequest;
   Query: {};
+  SmoketestResponse: SmoketestResponse;
+  SmoketestStep: SmoketestStep;
   String: Scalars['String']['output'];
   Template: Template;
   Time: Scalars['Time']['output'];
@@ -1449,7 +1513,9 @@ export type LocationResolvers<ContextType = any, ParentType extends ResolversPar
 };
 
 export type MutationResolvers<ContextType = any, ParentType extends ResolversParentTypes['Mutation'] = ResolversParentTypes['Mutation']> = {
-  createHost?: Resolver<Maybe<ResolversTypes['CreateHostResponse']>, ParentType, ContextType, RequireFields<MutationCreateHostArgs, 'host' | 'hostgroupids' | 'templateids'>>;
+  createHost?: Resolver<Maybe<ResolversTypes['CreateHostResponse']>, ParentType, ContextType, RequireFields<MutationCreateHostArgs, 'host' | 'hostgroupids'>>;
+  deleteHostGroups?: Resolver<Maybe<Array<ResolversTypes['DeleteResponse']>>, ParentType, ContextType, Partial<MutationDeleteHostGroupsArgs>>;
+  deleteHosts?: Resolver<Maybe<Array<ResolversTypes['DeleteResponse']>>, ParentType, ContextType, Partial<MutationDeleteHostsArgs>>;
   deleteTemplateGroups?: Resolver<Maybe<Array<ResolversTypes['DeleteResponse']>>, ParentType, ContextType, Partial<MutationDeleteTemplateGroupsArgs>>;
   deleteTemplates?: Resolver<Maybe<Array<ResolversTypes['DeleteResponse']>>, ParentType, ContextType, Partial<MutationDeleteTemplatesArgs>>;
   importHostGroups?: Resolver<Maybe<Array<ResolversTypes['CreateHostGroupResponse']>>, ParentType, ContextType, RequireFields<MutationImportHostGroupsArgs, 'hostGroups'>>;
@@ -1457,6 +1523,7 @@ export type MutationResolvers<ContextType = any, ParentType extends ResolversPar
   importTemplateGroups?: Resolver<Maybe<Array<ResolversTypes['CreateTemplateGroupResponse']>>, ParentType, ContextType, RequireFields<MutationImportTemplateGroupsArgs, 'templateGroups'>>;
   importTemplates?: Resolver<Maybe<Array<ResolversTypes['ImportTemplateResponse']>>, ParentType, ContextType, RequireFields<MutationImportTemplatesArgs, 'templates'>>;
   importUserRights?: Resolver<Maybe<ResolversTypes['ImportUserRightsResult']>, ParentType, ContextType, RequireFields<MutationImportUserRightsArgs, 'dryRun' | 'input'>>;
+  runSmoketest?: Resolver<ResolversTypes['SmoketestResponse'], ParentType, ContextType, RequireFields<MutationRunSmoketestArgs, 'groupName' | 'hostName' | 'templateName'>>;
 };
 
 export type OperationalDeviceDataResolvers<ContextType = any, ParentType extends ResolversParentTypes['OperationalDeviceData'] = ResolversParentTypes['OperationalDeviceData']> = {
@@ -1488,6 +1555,20 @@ export type QueryResolvers<ContextType = any, ParentType extends ResolversParent
   zabbixVersion?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
 };
 
+export type SmoketestResponseResolvers<ContextType = any, ParentType extends ResolversParentTypes['SmoketestResponse'] = ResolversParentTypes['SmoketestResponse']> = {
+  message?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
+  steps?: Resolver<Array<ResolversTypes['SmoketestStep']>, ParentType, ContextType>;
+  success?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
+  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
+};
+
+export type SmoketestStepResolvers<ContextType = any, ParentType extends ResolversParentTypes['SmoketestStep'] = ResolversParentTypes['SmoketestStep']> = {
+  message?: Resolver<Maybe<ResolversTypes['String']>, ParentType, ContextType>;
+  name?: Resolver<ResolversTypes['String'], ParentType, ContextType>;
+  success?: Resolver<ResolversTypes['Boolean'], ParentType, ContextType>;
+  __isTypeOf?: IsTypeOfResolverFn<ParentType, ContextType>;
+};
+
 export type StorageItemTypeResolvers = EnumResolverSignature<{ FLOAT?: any, INT?: any, TEXT?: any }, ResolversTypes['StorageItemType']>;
 
 export type TemplateResolvers<ContextType = any, ParentType extends ResolversParentTypes['Template'] = ResolversParentTypes['Template']> = {
@@ -1636,6 +1717,8 @@ export type Resolvers<ContextType = any> = {
   OperationalDeviceData?: OperationalDeviceDataResolvers<ContextType>;
   Permission?: PermissionResolvers;
   Query?: QueryResolvers<ContextType>;
+  SmoketestResponse?: SmoketestResponseResolvers<ContextType>;
+  SmoketestStep?: SmoketestStepResolvers<ContextType>;
   StorageItemType?: StorageItemTypeResolvers;
   Template?: TemplateResolvers<ContextType>;
   Time?: GraphQLScalarType;
@@ -71,8 +71,8 @@ describe("HostImporter", () => {
         // Mocking template lookup for deviceType
         (zabbixAPI.post as jest.Mock).mockResolvedValueOnce([{ templateid: "301" }]);
 
-        // Mocking host.create via requestByPath
-        (zabbixAPI.requestByPath as jest.Mock).mockResolvedValueOnce({ hostids: ["401"] });
+        // Mocking host.create via post (called by ZabbixCreateHostRequest)
+        (zabbixAPI.post as jest.Mock).mockResolvedValueOnce({ hostids: ["401"] });
 
         const result = await HostImporter.importHosts(hosts, "token");
 
@@ -62,9 +62,8 @@ describe("Host Integration Tests", () => {
         (zabbixAPI.post as jest.Mock)
             .mockResolvedValueOnce([{ groupid: "201", name: ZABBIX_EDGE_DEVICE_BASE_GROUP }]) // Base group
             .mockResolvedValueOnce([{ groupid: "202", name: ZABBIX_EDGE_DEVICE_BASE_GROUP + "/ConstructionSite/Test" }]) // Specific group
-            .mockResolvedValueOnce([{ templateid: "301" }]); // Template lookup
-        (zabbixAPI.requestByPath as jest.Mock).mockResolvedValueOnce({ hostids: ["401"] });
+            .mockResolvedValueOnce([{ templateid: "301" }]) // Template lookup
+            .mockResolvedValueOnce({ hostids: ["401"] }); // Host creation
 
         const response = await server.executeOperation({
             query: mutation,
src/test/template_link.test.ts (new file, 95 lines)
import {ApolloServer} from '@apollo/server';
import {schema_loader} from '../api/schema.js';
import {zabbixAPI} from '../datasources/zabbix-api.js';

// Mocking ZabbixAPI
jest.mock("../datasources/zabbix-api.js", () => ({
    zabbixAPI: {
        post: jest.fn(),
        executeRequest: jest.fn(),
        baseURL: 'http://localhost/zabbix',
        requestByPath: jest.fn()
    },
    ZABBIX_EDGE_DEVICE_BASE_GROUP: "Roadwork"
}));

describe("Template Linking Tests", () => {
    let server: ApolloServer;

    beforeAll(async () => {
        const schema = await schema_loader();
        server = new ApolloServer({
            schema,
        });
    });

    test("createHost with templateNames", async () => {
        const mutation = `
            mutation CreateHost($host: String!, $hostgroupids: [Int!]!, $templateNames: [String!]!) {
                createHost(host: $host, hostgroupids: $hostgroupids, templateNames: $templateNames) {
                    hostids
                }
            }
        `;
        const variables = {
            host: "TestHost",
            hostgroupids: [1],
            templateNames: ["Test Template"]
        };

        (zabbixAPI.post as jest.Mock)
            .mockResolvedValueOnce([{ templateid: "101", name: "Test Template" }]) // Template lookup
            .mockResolvedValueOnce({ hostids: ["201"] }); // Host creation

        const response = await server.executeOperation({
            query: mutation,
            variables: variables,
        }, {
            contextValue: { zabbixAuthToken: 'test-token', dataSources: { zabbixAPI: zabbixAPI } }
        });

        expect(response.body.kind).toBe('single');
        // @ts-ignore
        const result = response.body.singleResult;
        expect(result.errors).toBeUndefined();
        expect(result.data.createHost.hostids).toContain(201);
    });

    test("importHosts with templateids and templateNames", async () => {
        const mutation = `
            mutation ImportHosts($hosts: [CreateHost!]!) {
                importHosts(hosts: $hosts) {
                    hostid
                }
            }
        `;
        const variables = {
            hosts: [{
                deviceKey: "TestDevice",
                deviceType: "TestType",
                groupNames: ["TestGroup"],
                templateids: [101],
                templateNames: ["Another Template"]
            }]
        };

        (zabbixAPI.post as jest.Mock)
            .mockResolvedValueOnce([{ groupid: "501", name: "Roadwork" }]) // Base group lookup
            .mockResolvedValueOnce([{ groupid: "502", name: "Roadwork/TestGroup" }]) // Specific group lookup
            .mockResolvedValueOnce([{ templateid: "102", name: "Another Template" }]) // Template lookup
            .mockResolvedValueOnce({ hostids: ["202"] }); // Host creation

        const response = await server.executeOperation({
            query: mutation,
            variables: variables,
        }, {
            contextValue: { zabbixAuthToken: 'test-token', dataSources: { zabbixAPI: zabbixAPI } }
        });

        expect(response.body.kind).toBe('single');
        // @ts-ignore
        const result = response.body.singleResult;
        expect(result.errors).toBeUndefined();
        expect(result.data.importHosts[0].hostid).toBe("202");
    });
});
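To run only the new test file locally alongside the documented `npm run test`, a path-scoped Jest invocation should work; the exact flags depend on the project's Jest/ESM setup, so treat this as a sketch:

```bash
# Run only the template-linking tests (assumes the project's usual Jest configuration).
npm run test -- src/test/template_link.test.ts
```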