{
  "type": "REST",
  "operations": [
    {
      "name": "Validate",
      "key": "validate",
      "inputParameters": [
        {
          "name": "text",
          "displayName": "Content to check",
          "type": "string",
          "required": true,
          "bodyLocation": "llmOutput"
        },
        {
          "name": "numReasks",
          "displayName": "Number of re-asks",
          "type": "integer",
          "defaultValue": 0,
          "bodyLocation": "numReasks"
        }
      ],
      "outputParameters": [
        {
          "name": "passed",
          "displayName": "Passed",
          "type": "boolean",
          "applyMappingConfig": "always"
        },
        {
          "name": "reason",
          "displayName": "Reason",
          "type": "string",
          "applyMappingConfig": "always"
        }
      ],
      "config": {
        "method": "POST",
        "url": "guards/content-safety/validate",
        "httpResponseHandler": {
          "type": "script",
          "implementation": {
            "language": "javascript",
            "script": "var statusCode = flwHttpResponse.getStatusCode();\nvar result = flw.json.createObject();\n\nif(statusCode === 401 || statusCode === 403) {\n throw 'Guardrails AI authentication failed (HTTP ' + statusCode + '). Verify the API key is configured correctly.';\n}\n\nif(statusCode === 400) {\n var errorJson = flw.json.stringToJson(flwHttpResponse.getBody());\n var detail = errorJson.path('detail').asString();\n result.putBoolean('passed', false);\n result.putString('reason', flw.string.hasText(detail) ? detail : 'Validation failed');\n flwHttpResponse.setStatusCode(200);\n flwHttpResponse.setBody(flw.json.jsonToString(result));\n} else if(statusCode >= 200 && statusCode < 300) {\n result.putBoolean('passed', true);\n result.putString('reason', '');\n flwHttpResponse.setBody(flw.json.jsonToString(result));\n} else {\n throw 'Guardrails AI API returned HTTP ' + statusCode + ': ' + flwHttpResponse.getBody();\n}"
          }
        }
      },
      "type": "search"
    }
  ],
  "config": {
    "baseUrl": "http://localhost:8000"
  },
  "name": "Guardrails AI Guardrail",
  "key": "guardrailsAiGuardrail",
  "description": "Modular content validation using Guardrails AI. Supports 70+ validators from the Guardrails Hub including toxicity, PII, prompt injection, and custom validators."
}