Initial commit: Extract base Zabbix GraphQL - API functionality from VCR Project and add dynamic schema samples
This commit is contained in:
commit
92ffe71684
42 changed files with 4234 additions and 0 deletions
194
src/api/resolver_helpers.ts
Normal file
194
src/api/resolver_helpers.ts
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
import {isObjectType} from "graphql";
|
||||
import {logger} from "../logging/logger.js";
|
||||
|
||||
/*
|
||||
As a default all . - seperators within a key shall be replaced by a Capital letter of the following word
|
||||
*/
|
||||
function defaultKeyMappingFunction(key: string): string {
|
||||
let words = key.split(".")
|
||||
for (let i = 1; i < words.length; i++) {
|
||||
if (words[i]) {
|
||||
words[i] = words[i][0].toUpperCase() + words[i].substring(1);
|
||||
}
|
||||
}
|
||||
return words.join("")
|
||||
}
|
||||
|
||||
export function createHierarchicalValueFieldResolver(
|
||||
schema: any, typename: string,
|
||||
sourceFieldMapper: (fieldname: string, parent: any, objectTypeRequested: boolean) => { [p: string]: any } | null): {
|
||||
[fieldname: string]: any
|
||||
} {
|
||||
let resolver: { [fieldname: string]: any } = {}
|
||||
|
||||
let type = schema.getType(typename)
|
||||
if (isObjectType(type)) {
|
||||
let fields = type.getFields();
|
||||
for (let fieldsKey in fields) {
|
||||
let field = fields[fieldsKey];
|
||||
resolver[field.name] = (parent: any) => sourceFieldMapper(field.name, parent, isObjectType(field.type));
|
||||
}
|
||||
}
|
||||
return resolver
|
||||
}
|
||||
|
||||
export function zabbixItemValueSourceFieldMapper(
|
||||
fieldname: string,
|
||||
parent: {
|
||||
items: [{ itemid: string, key_: string; name: string, lastvalue: any }],
|
||||
[key: string]: any
|
||||
},
|
||||
objectTypeRequested: boolean
|
||||
) {
|
||||
let result: { [p: string]: any; } | any = parent[fieldname]
|
||||
if (!parent.items) {
|
||||
logger.debug(`No parent.items found: ${JSON.stringify(parent)}`)
|
||||
return result
|
||||
}
|
||||
parent.items.forEach(
|
||||
item => {
|
||||
result = mapAttributeListToGraphQlType(result, objectTypeRequested, fieldname, {
|
||||
key: item.key_,
|
||||
value: item.lastvalue
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
logger.info(`Device data mapped: ${JSON.stringify(result)}`)
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function zabbixTagsValueSourceFieldMapper(
|
||||
fieldname: string,
|
||||
tags: [{ tag: string, value: any }],
|
||||
objectTypeRequested: boolean
|
||||
) {
|
||||
let result: { [p: string]: any; } | any = {}
|
||||
if (!tags) {
|
||||
logger.debug(`No parent.tags or parent.inheritedTags found: ${JSON.stringify(tags)}`)
|
||||
return result
|
||||
}
|
||||
tags.forEach(
|
||||
tag => {
|
||||
result = mapAttributeListToGraphQlType(result, objectTypeRequested, fieldname, {
|
||||
key: tag.tag,
|
||||
value: tag.value
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
logger.info(`Device tags mapped: ${JSON.stringify(result)}`)
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function mapAttributeListToGraphQlType(result: {
|
||||
[p: string]: any;
|
||||
} | any, objectTypeRequested: boolean, fieldname: string, item: { value: any, key: string }) {
|
||||
|
||||
logger.debug(`Resolving ${objectTypeRequested ? "attributes of object" : "value of scalar"} field parent.${fieldname} (${result}), looking up key from item ${JSON.stringify(item)}`)
|
||||
if (item.key) {
|
||||
if (objectTypeRequested) {
|
||||
function addRecursive(
|
||||
result: { [x: string]: any; } | null,
|
||||
fieldHierarchy: string[],
|
||||
value: any
|
||||
) {
|
||||
if (!fieldHierarchy || fieldHierarchy.length == 0) {
|
||||
return result
|
||||
|
||||
} else {
|
||||
if (!result) {
|
||||
result = {}
|
||||
}
|
||||
if (fieldHierarchy.length == 1) {
|
||||
let fieldTokenName = fieldHierarchy[0];
|
||||
const TOKEN_SEPERATOR = "_";
|
||||
|
||||
// As value is not typed we must parse the type in order to transform it to a strongly
|
||||
// typed value which is expected by Graphql
|
||||
// Example: Graphql does not accept a string "true" and empty string as false as boolean
|
||||
// In order to facilitate this it is possible (but not mandatory) to provide typehints
|
||||
// to item keys by prepending the fieldTokenName with a typehint, following by an underscore.
|
||||
// I.e. if a key is prefixed with str_, bool_, float_ or json_ this will be stripped
|
||||
// and the value will be cast to the appropriate type.
|
||||
// If no typeHint is provided and the type is string it will be tried to create a float
|
||||
// or a boolean out of it.
|
||||
let typeHintToken = fieldTokenName.split(TOKEN_SEPERATOR);
|
||||
let typeHint = undefined;
|
||||
if (typeHintToken.length > 0) {
|
||||
switch (typeHintToken[0]) {
|
||||
case "str":
|
||||
case "bool":
|
||||
case "float":
|
||||
case "json":
|
||||
typeHint = typeHintToken[0];
|
||||
// Remove typehint + token separator from field name - if the typehint
|
||||
// is followed by another token. If not (e.g. fieldTokenName="str" only) the
|
||||
// token is considered to be a valid typehint, but it is not stripped from the
|
||||
// fieldTokenName (i.e. nothing happens to the fieldTokenName in that case)
|
||||
if (typeHintToken.length > 1) {
|
||||
fieldTokenName = fieldTokenName.substring(typeHint.length + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
let fieldValue = undefined;
|
||||
|
||||
if (typeof value === 'string' && (typeHint == "bool" || value.toLowerCase() === 'true' || value.toLowerCase() === 'false')) {
|
||||
fieldValue = (value.toLowerCase() === 'true');
|
||||
// logger.debug("Parsing attribute '" + fieldTokenName + "' as true/false string, type=" + typeof fieldValue + ", value=" + fieldValue);
|
||||
} else if (typeof value === 'string' && value !== '' && !isNaN(Number(value))) {
|
||||
fieldValue = Number(value);
|
||||
// logger.debug("Parsing attribute '" + fieldTokenName + "' as number, type=" + typeof fieldValue + ", value=" + fieldValue);
|
||||
} else if (typeof value === 'string' && typeHint == "json") {
|
||||
logger.debug("Trying to parse attribute value as json, typeHint=" + typeHint + ", type=" + typeof value + ", value=" + value);
|
||||
if (!value) {
|
||||
// Empty string values will be considered to be an unset JSON-Object if there is a typeHin=="json"
|
||||
fieldValue = undefined;
|
||||
} else {
|
||||
try {
|
||||
fieldValue = JSON.parse(value);
|
||||
// logger.debug("Parsing attribute '" + fieldTokenName + "' as json, type=" + typeof fieldValue + ", value=" + value);
|
||||
} catch (e) {
|
||||
logger.debug("Unable to parse attribute value as json, passing unmodified, type=" + typeof value + ", value=" + value);
|
||||
fieldValue = value;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
fieldValue = value;
|
||||
// logger.debug("Passing attribute '" + fieldTokenName + "' unmodified, type=" + typeof fieldValue + ", value=" + fieldValue);
|
||||
}
|
||||
if (fieldValue !== undefined) {
|
||||
// logger.debug(`length of parsed field hierarchy is 1: Setting fieldTokenName=${fieldTokenName} to ${value}`);
|
||||
result[fieldTokenName] = fieldValue;
|
||||
} /*else {
|
||||
// logger.debug(`length of parsed field hierarchy is 1: Skipping to set fieldHierarchy=${fieldHierarchy} to ${value} (empty value)`);
|
||||
}*/
|
||||
} else {
|
||||
result[fieldHierarchy[0]] = addRecursive(result[fieldHierarchy[0]], fieldHierarchy.slice(1), value)
|
||||
}
|
||||
}
|
||||
logger.debug(`Adding attribute ${fieldHierarchy[0]}, result: ${JSON.stringify(result)}`)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
let fieldHierarchy = item.key.split(".");
|
||||
if (fieldHierarchy[0] == fieldname) {
|
||||
result = addRecursive(result, fieldHierarchy.slice(1), item.value);
|
||||
logger.debug(`Detected matching item key ${fieldname} in item , result: ${JSON.stringify(result)}`)
|
||||
} else {
|
||||
logger.debug(`Item key ${fieldHierarchy[0]} not matched fieldname=${fieldname}, result: ${JSON.stringify(result)}`)
|
||||
}
|
||||
|
||||
} else {
|
||||
let keyInCamel = defaultKeyMappingFunction(item.key);
|
||||
if (keyInCamel == fieldname) {
|
||||
result = item.value
|
||||
logger.debug(`Detected matching item key ${keyInCamel} in item , result: ${JSON.stringify(result)}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
257
src/api/resolvers.ts
Normal file
257
src/api/resolvers.ts
Normal file
|
|
@ -0,0 +1,257 @@
|
|||
import {
|
||||
DeviceCommunicationType,
|
||||
DeviceStatus,
|
||||
MutationCreateHostArgs,
|
||||
MutationImportHostsArgs,
|
||||
MutationImportHostGroupsArgs,
|
||||
MutationImportUserRightsArgs,
|
||||
Permission,
|
||||
QueryAllHostsArgs,
|
||||
QueryAllHostGroupsArgs,
|
||||
QueryExportDeviceValueHistoryArgs,
|
||||
QueryExportUserRightsArgs,
|
||||
QueryHasPermissionsArgs,
|
||||
QueryUserPermissionsArgs,
|
||||
Resolvers,
|
||||
StorageItemType, Host,
|
||||
} from "../generated/graphql.js";
|
||||
|
||||
import {HostImporter} from "../execution/host_importer";
|
||||
import {HostValueExporter} from "../execution/host_exporter";
|
||||
import {logger} from "../logging/logger.js";
|
||||
import {ParsedArgs, ZabbixPermissionsHelper, ZabbixRequest} from "../datasources/zabbix-request.js";
|
||||
import {ZabbixCreateHostRequest, ZabbixQueryHostsRequestWithItemsAndInventory,} from "../datasources/zabbix-hosts.js";
|
||||
import {ZabbixQueryHostgroupsParams, ZabbixQueryHostgroupsRequest} from "../datasources/zabbix-hostgroups.js";
|
||||
import {
|
||||
ZabbixExportUserGroupArgs,
|
||||
ZabbixExportUserGroupsRequest,
|
||||
ZabbixImportUserGroupsParams,
|
||||
ZabbixImportUserGroupsRequest
|
||||
} from "../datasources/zabbix-usergroups.js";
|
||||
import {
|
||||
ZabbixImportUserRolesParams,
|
||||
ZabbixImportUserRolesRequest,
|
||||
ZabbixQueryUserRolesRequest
|
||||
} from "../datasources/zabbix-userroles.js";
|
||||
import {ZABBIX_EDGE_DEVICE_BASE_GROUP, zabbixAPI} from "../datasources/zabbix-api";
|
||||
import {GraphQLInterfaceType, GraphQLList} from "graphql/type";
|
||||
|
||||
|
||||
/**
 * Builds the static (non-generated) resolver map for the Zabbix GraphQL API:
 * queries, mutations, the Host interface type resolver, field remappings for
 * Inventory and UserRoleRules, and enum value mappings. Resolvers receive
 * auth state (zabbixAuthToken, cookie) and dataSources from the request
 * context built in start.ts.
 */
export function createResolvers(): Resolvers {
    // @ts-ignore
    // @ts-ignore
    return {
        Query: {
            // Permissions of the calling user, optionally filtered by object names.
            userPermissions: async (_parent: any, objectNamesFilter: QueryUserPermissionsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                return ZabbixPermissionsHelper.getUserPermissions(zabbixAPI, zabbixAuthToken, cookie, objectNamesFilter.objectNames)
            },
            // Checks whether the calling user holds the requested permissions.
            hasPermissions: async (_parent: any, args: QueryHasPermissionsArgs, {zabbixAuthToken, cookie}: any) => {
                return ZabbixPermissionsHelper.hasUserPermissions(zabbixAPI, args, zabbixAuthToken, cookie)
            },
            // NOTE(review): 'Object' is the boxed wrapper type — prefer 'object'
            // or a generated args type here.
            locations: (_parent: any, args: Object, {dataSources, zabbixAuthToken}: any) => {
                return dataSources.zabbixAPI.getLocations(zabbixAuthToken, new ParsedArgs(args));
            },
            // Version of this API process, injected via environment.
            apiVersion: () => {
                return process.env.API_VERSION ?? "unknown"
            },
            // Version reported by the backing Zabbix server (apiinfo.version).
            zabbixVersion: async () => {
                return await new ZabbixRequest<string>("apiinfo.version").executeRequestThrowError(
                    zabbixAPI)
            },
            // Forwards credentials to Zabbix user.login and returns its result
            // (presumably the auth token — confirm against ZabbixRequest).
            login: async (_parent, args) => {
                return await new ZabbixRequest<any>("user.login").executeRequestThrowError(
                    zabbixAPI, new ParsedArgs(args))
            },
            // Invalidates the session identified by the request cookie.
            logout: async (_parent, _args, {zabbixAuthToken, cookie}: any) => {
                return await new ZabbixRequest<any>("user.logout", undefined, cookie).executeRequestThrowError(zabbixAPI);
            },

            // All hosts incl. items and inventory; defaults the hostType tag
            // filter to the edge-device base group when not supplied.
            allHosts: async (_parent: any, args: QueryAllHostsArgs, {
                zabbixAuthToken,
                cookie, dataSources
            }: any) => {
                args.tag_hostType ??= [ZABBIX_EDGE_DEVICE_BASE_GROUP];
                return await new ZabbixQueryHostsRequestWithItemsAndInventory(zabbixAuthToken, cookie)
                    .executeRequestThrowError(
                        dataSources.zabbixAPI, new ParsedArgs(args)
                    )
            },

            // All host groups; defaults the name pattern to the edge-device
            // base group subtree when no explicit search name is given.
            allHostGroups: async (_parent: any, args: QueryAllHostGroupsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                if (!args.search_name) {
                    args.search_name = ZABBIX_EDGE_DEVICE_BASE_GROUP + "/*"
                }
                return await new ZabbixQueryHostgroupsRequest(zabbixAuthToken, cookie).executeRequestThrowError(
                    zabbixAPI, new ZabbixQueryHostgroupsParams(args)
                )
            },

            // Exports historical device values via the HostValueExporter.
            exportDeviceValueHistory: (_parent: any, args: QueryExportDeviceValueHistoryArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                return HostValueExporter.exportDeviceData(args, zabbixAuthToken, cookie)
            },

            // Exports user groups and user roles matching the given name pattern
            // in one combined payload.
            exportUserRights: async (_, args: QueryExportUserRightsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                let groups = await new ZabbixExportUserGroupsRequest(zabbixAuthToken, cookie)
                    .executeRequestThrowError(zabbixAPI, new ZabbixExportUserGroupArgs(args.name_pattern, args.exclude_hostgroups_pattern));
                let roles = await new ZabbixQueryUserRolesRequest(zabbixAuthToken, cookie)
                    .executeRequestThrowError(zabbixAPI, new ParsedArgs(args.name_pattern ? {name_pattern: args.name_pattern} : undefined));
                return {
                    userGroups: groups,
                    userRoles: roles
                }
            }
        },
        Mutation: {

            // Creates a single host from the mutation arguments.
            createHost: async (_parent: any, args: MutationCreateHostArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                return await new ZabbixCreateHostRequest(zabbixAuthToken, cookie).executeRequestThrowError(
                    zabbixAPI,
                    new ParsedArgs(args)
                )
            },
            // Bulk-imports host groups via the HostImporter.
            importHostGroups: async (_parent: any, args: MutationImportHostGroupsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                return HostImporter.importHostGroups(args.hostGroups, zabbixAuthToken, cookie)
            },
            // Bulk-imports hosts (devices) via the HostImporter.
            importHosts: async (_parent: any, args: MutationImportHostsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                return HostImporter.importHosts(args.devices, zabbixAuthToken, cookie)
            },
            // Imports user roles and user groups; each part is cloned and run
            // independently (roles first), honoring the dryRun flag.
            importUserRights: async (_, args: MutationImportUserRightsArgs, {
                zabbixAuthToken,
                cookie
            }: any) => {
                let userRoleImportArgs = structuredClone(args);
                let userGroupImportArgs = structuredClone(args);
                let userRolesImport = userRoleImportArgs.input.userRoles ?
                    await new ZabbixImportUserRolesRequest(zabbixAuthToken, cookie)
                        .executeRequestThrowError(zabbixAPI,
                            new ZabbixImportUserRolesParams(userRoleImportArgs.input.userRoles, userRoleImportArgs.dryRun)) : null;
                let userGroupsImport = userGroupImportArgs.input.userGroups ?
                    await new ZabbixImportUserGroupsRequest(zabbixAuthToken, cookie)
                        .executeRequestThrowError(zabbixAPI,
                            new ZabbixImportUserGroupsParams(userGroupImportArgs.input.userGroups, userGroupImportArgs.dryRun)) : null;
                return {
                    userRoles: userRolesImport,
                    userGroups: userGroupsImport
                }
            }
        },

        Host: {
            // Resolves the concrete GraphQL type of a Host: the host's
            // deviceType if the schema implements it, else GenericDevice;
            // ZabbixHost when no deviceType is set at all.
            // @ts-ignore
            __resolveType: function (host: Host, _context, info ): string {

                const deviceType = host.deviceType ?? "";

                if (deviceType) {
                    logger.info(`checking host ${host.name} for deviceType - found ${deviceType}`);
                    // Unwrap a list return type to get at the Host interface type.
                    let interfaceType: GraphQLInterfaceType = (info.returnType instanceof GraphQLList ?
                        info.returnType.ofType : info.returnType) as GraphQLInterfaceType
                    if (info.schema.getImplementations(interfaceType).objects.some((impl: { name: string; }) => impl.name === deviceType)) {
                        return deviceType;
                    }
                    return "GenericDevice"
                }

                logger.info(`checking host ${host.name} for deviceType - no device type found, returning as ZabbixHost`);
                return "ZabbixHost"; // Return "generic" device host as a default if no templates are assigned
            }

        },

        Inventory: {
            /*
            Always map inventory.location,... fields to location object
            */
            // @ts-ignore
            location: (parent: { location: string; location_lon: string; location_lat: string; }) => {
                return {
                    name: parent.location,
                    longitude: parent.location_lon,
                    latitude: parent.location_lat,
                }
            }
        },

        // Maps dotted Zabbix role-rule keys onto snake_case GraphQL fields.
        UserRoleRules: {
            ui_default_access: (parent: any) => {
                return parent["ui.default_access"]
            },
            modules_default_access: (parent: any) => {
                return parent["modules.default_access"]
            },
            actions_default_access: (parent: any) => {
                return parent["actions.default_access"]
            },
            api_access: (parent: any) => {
                return parent["api.access"]
            },
            api_mode: (parent: any) => {
                return parent["api.mode"]
            },
        },

        // Enum Value Mappings
        Permission: {
            READ: Permission.Read,
            READ_WRITE: Permission.ReadWrite,
            DENY: Permission.Deny
        },

        DeviceCommunicationType: {
            ZABBIX_AGENT: DeviceCommunicationType.ZABBIX_AGENT,
            ZABBIX_AGENT_ACTIVE: DeviceCommunicationType.ZABBIX_AGENT_ACTIVE,
            ZABBIX_TRAP: DeviceCommunicationType.ZABBIX_TRAP,
            SIMPLE_CHECK: DeviceCommunicationType.SIMPLE_CHECK,
            ZABBIX_INTERNAL_ITEM: DeviceCommunicationType.ZABBIX_INTERNAL_ITEM,
            DEPENDANT_ITEM: DeviceCommunicationType.DEPENDANT_ITEM,
            HTTP_AGENT: DeviceCommunicationType.HTTP_AGENT,
            SIMULATOR_CALCULATED: DeviceCommunicationType.SIMULATOR_CALCULATED,
            SNMP_AGENT: DeviceCommunicationType.SNMP_AGENT,
            SNMP_TRAP: DeviceCommunicationType.SNMP_TRAP,
            IPMI_AGENT: DeviceCommunicationType.IPMI_AGENT,
            JMX_AGENT: DeviceCommunicationType.JMX_AGENT,
            SIMULATOR_JAVASCRIPT: DeviceCommunicationType.SIMULATOR_JAVASCRIPT,
            DATABASE_MONITOR: DeviceCommunicationType.DATABASE_MONITOR,
        },
        DeviceStatus: {
            ENABLED: DeviceStatus.ENABLED,
            DISABLED: DeviceStatus.DISABLED
        },

        // Numeric codes — presumably Zabbix item value_type codes; confirm
        // against the Zabbix API before relying on the mapping.
        SensorValueType: {
            NUMERIC: 0,
            CHARACTER: 1,
            LOG: 2,
            NUMERIC_UNSIGNED: 3,
            TEXT: 4
        },
        StorageItemType: {
            TEXT: StorageItemType.Text,
            FLOAT: StorageItemType.Float,
            INT: StorageItemType.Int,
        }
    }
}
|
||||
|
||||
59
src/api/schema.ts
Normal file
59
src/api/schema.ts
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
import {
|
||||
createHierarchicalValueFieldResolver,
|
||||
zabbixItemValueSourceFieldMapper,
|
||||
zabbixTagsValueSourceFieldMapper
|
||||
} from "./resolver_helpers.js";
|
||||
import {makeExecutableSchema, mergeSchemas} from "@graphql-tools/schema";
|
||||
import {readFileSync} from "fs";
|
||||
import {GraphQLSchema} from "graphql/type";
|
||||
import {createResolvers} from "./resolvers.js";
|
||||
|
||||
|
||||
const createZabbixHierarchicalDeviceFieldResolver =
|
||||
(typename: string, schema: any, additionalMappings: { [p: string]: any } = {}) => {
|
||||
return {
|
||||
...createHierarchicalValueFieldResolver(schema, typename, zabbixItemValueSourceFieldMapper),
|
||||
...additionalMappings
|
||||
}
|
||||
}
|
||||
const createZabbixHierarchicalDeviceTagsResolver =
|
||||
(typename: string, schema: any, additionalMappings: { [p: string]: any } = {}) => {
|
||||
return {
|
||||
...createHierarchicalValueFieldResolver(schema, typename, zabbixTagsValueSourceFieldMapper),
|
||||
...additionalMappings
|
||||
}
|
||||
}
|
||||
export async function schema_loader(): Promise<GraphQLSchema> {
|
||||
const resolvers = createResolvers();
|
||||
let typeDefs: string = readFileSync('./schema.graphql', {encoding: 'utf-8'});
|
||||
if (process.env.ADDITIONAL_SCHEMAS) {
|
||||
for (const schema of process.env.ADDITIONAL_SCHEMAS.split(",")){
|
||||
typeDefs += readFileSync(schema, {encoding: 'utf-8'});
|
||||
}
|
||||
}
|
||||
|
||||
let originalSchema =
|
||||
makeExecutableSchema({
|
||||
typeDefs,
|
||||
resolvers,
|
||||
});
|
||||
let additionalMappings = {
|
||||
tags: (parent: { tags: any; inheritedTags: any }) => {
|
||||
return (parent.tags || []).concat(parent.inheritedTags || [])
|
||||
}
|
||||
}
|
||||
let genericResolvers: Record<string, any> = {
|
||||
Device: createZabbixHierarchicalDeviceFieldResolver("Device", originalSchema,additionalMappings ),
|
||||
GenericDevice: createZabbixHierarchicalDeviceFieldResolver("GenericDevice", originalSchema, additionalMappings),
|
||||
}
|
||||
if (process.env.ADDITIONAL_RESOLVERS) {
|
||||
for (const resolver of process.env.ADDITIONAL_RESOLVERS.split(",")){
|
||||
genericResolvers[resolver] = createZabbixHierarchicalDeviceFieldResolver(resolver, originalSchema, additionalMappings)
|
||||
}
|
||||
}
|
||||
return mergeSchemas({
|
||||
schemas: [originalSchema],
|
||||
// TODO Generate resolvers for all schema types with @generateZabbix directive automatically
|
||||
resolvers: genericResolvers
|
||||
});
|
||||
}
|
||||
98
src/api/start.ts
Normal file
98
src/api/start.ts
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
import http from "http";
|
||||
import {schema_loader} from "./schema.js";
|
||||
import {GraphQLSchema} from "graphql/type";
|
||||
import {ApolloServer} from "@apollo/server";
|
||||
import {expressMiddleware} from '@as-integrations/express4';
|
||||
import express from 'express';
|
||||
|
||||
import cors from "cors";
|
||||
import {ApolloServerPluginDrainHttpServer} from '@apollo/server/plugin/drainHttpServer';
|
||||
import {logger} from "../logging/logger.js";
|
||||
import {zabbixAPI, zabbixRequestAuthToken} from "../datasources/zabbix-api";
|
||||
import {WebSocketServer} from "ws";
|
||||
import {useServer} from "graphql-ws/lib/use/ws";
|
||||
|
||||
// Path and port the GraphQL endpoint is served on; the HTTP (Express)
// and WebSocket (subscriptions) servers share both.
const GRAPHQL_PATH = "/"
const GRAPHQL_PORT = 4000
|
||||
|
||||
export function startAPi() {
|
||||
startApolloServer().then(
|
||||
r => {
|
||||
logger.info(`🚀 API ready at http://localhost:` + GRAPHQL_PORT + GRAPHQL_PATH);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
 * Wires up the full server stack: loads the executable schema, creates the
 * Express app and HTTP server, attaches a WebSocket server (subscriptions)
 * on the same path, starts Apollo Server with graceful-shutdown plugins for
 * both servers, installs the CORS/JSON/Apollo middleware (building the
 * per-request context with Zabbix auth state), and finally begins listening
 * on GRAPHQL_PORT. Resolves once the HTTP server is listening.
 */
async function startApolloServer() {
    return schema_loader().then(async (executableSchema: GraphQLSchema) => {
        // Required logic for integrating with Express
        const app = express();
        // Our httpServer handles incoming requests to our Express app.
        // Below, we tell Apollo Server to "drain" this httpServer,
        // enabling our servers to shut down gracefully.
        const httpServer = http.createServer(app);

        const wsServer = new WebSocketServer({
            // This is the `httpServer` we created in a previous step.
            server: httpServer,
            // Pass a different path here if app.use
            // serves expressMiddleware at a different path
            path: GRAPHQL_PATH,
        });

        // Hand in the schema we just created and have the
        // WebSocketServer start listening.
        const serverCleanup = useServer({schema: executableSchema}, wsServer);
        const server: ApolloServer = new ApolloServer({
            schema: executableSchema,
            plugins: [
                // Proper shutdown for the HTTP server.
                ApolloServerPluginDrainHttpServer({httpServer}),

                // Proper shutdown for the WebSocket server.
                {
                    async serverWillStart() {
                        return {
                            async drainServer() {
                                await serverCleanup.dispose();
                            },
                        };
                    },
                },
            ],
        });

        await server.start();

        // Set up our Express middleware to handle CORS, body parsing,
        // and our expressMiddleware function.
        app.use(
            GRAPHQL_PATH,
            cors<cors.CorsRequest>(),
            express.json(),
            // expressMiddleware accepts the same arguments:
            // an Apollo Server instance and optional configuration options
            expressMiddleware(server, {
                // Per-request context: Apollo cache, data sources, and the
                // Zabbix auth token taken from the request header, falling
                // back to the process-wide request token.
                context: async ({req}) => {
                    const {cache} = server;
                    return {
                        cache,
                        dataSources: {
                            zabbixAPI: zabbixAPI,
                        },
                        zabbixAuthToken: req.headers["zabbix-auth-token"] ?? zabbixRequestAuthToken,
                        cookie: req.headers.cookie,
                        token: req.headers.token
                    };
                },
            }),
        );

        // Modified server startup
        await new Promise<void>((resolve) => httpServer.listen({port: GRAPHQL_PORT}, resolve));

    });

}
|
||||
Loading…
Add table
Add a link
Reference in a new issue