From f4967874ef380f34231a8dbe480dd9a9104921a8 Mon Sep 17 00:00:00 2001 From: Sergey Petrov Date: Sat, 9 Jul 2022 00:06:55 +0000 Subject: [PATCH] Release 1.30.0-1 --- .gitlab-ci.yml | 2 +- CHANGELOG.md | 9 + SUPPORT.md | 7 +- contributing/README.md | 2 +- contributing/process_release.md | 2 + docs/conf.py | 2 +- docs/revision-history.rst | 2 +- docs/setting-up-consumer.rst | 4 +- .../system_custom_endpoints_with_snmp.json | 5 + package-lock.json | 1034 +++++--- package.json | 16 +- scripts/build/publishRpm.sh | 36 - .../consumers/Azure_Log_Analytics/index.js | 33 +- src/lib/consumers/shared/azureUtil.js | 110 +- src/lib/consumers/shared/metricsUtil.js | 43 +- src/lib/systemStats.js | 16 +- src/lib/utils/metrics.js | 49 + src/lib/utils/normalize.js | 29 +- src/schema/1.30.0/actions_schema.json | 187 ++ src/schema/1.30.0/base_schema.json | 310 +++ src/schema/1.30.0/consumer_schema.json | 1386 +++++++++++ src/schema/1.30.0/controls_schema.json | 52 + src/schema/1.30.0/endpoints_schema.json | 190 ++ src/schema/1.30.0/ihealth_poller_schema.json | 238 ++ src/schema/1.30.0/listener_schema.json | 85 + src/schema/1.30.0/namespace_schema.json | 92 + src/schema/1.30.0/pull_consumer_schema.json | 101 + src/schema/1.30.0/shared_schema.json | 50 + src/schema/1.30.0/system_poller_schema.json | 242 ++ src/schema/1.30.0/system_schema.json | 121 + src/schema/latest/base_schema.json | 4 +- src/schema/latest/endpoints_schema.json | 13 + test/README.md | 15 +- test/customMochaReporter.js | 20 +- test/functional/cloud/awsTests.js | 292 ++- test/functional/cloud/azureTests.js | 245 +- test/functional/consumerSystemTests.js | 197 +- .../azureApplicationInsightsTests.js | 215 +- .../consumersTests/azureLogAnalyticsTests.js | 198 +- .../consumersTests/defaultPullConsumer.js | 94 + .../consumersTests/elasticsearchTests.js | 445 ++-- .../functional/consumersTests/f5CloudTests.js | 366 +-- .../functional/consumersTests/fluentdTests.js | 290 ++- .../googleCloudMonitoringTests.js | 195 +- 
test/functional/consumersTests/kafkaTests.js | 443 +++- .../openTelemetryExporterTests.js | 304 ++- .../consumersTests/prometheusTests.js | 126 + test/functional/consumersTests/splunkTests.js | 709 +++--- test/functional/consumersTests/statsdTests.js | 389 +-- .../deployment/example_harness_facts.json | 21 +- test/functional/dutTests.js | 1324 +++++------ test/functional/pullConsumerSystemTests.js | 73 - .../pullConsumersTests/defaultTests.js | 99 - .../pullConsumersTests/prometheusTests.js | 124 - test/functional/shared/azureUtil.js | 70 - test/functional/shared/cloudUtils/aws.js | 146 ++ test/functional/shared/cloudUtils/azure.js | 276 +++ test/functional/shared/cloudUtils/gcp.js | 153 ++ .../shared/connectors/dockerConnector.js | 375 +++ .../shared/connectors/telemetryConnector.js | 255 ++ test/functional/shared/constants.js | 172 +- .../shared/{ => data/declarations}/basic.json | 0 .../declarations}/basic_namespace.json | 0 .../declarations}/filter_system_poller.json | 0 .../declarations}/pull_consumer_basic.json | 0 .../pull_consumer_with_namespace.json | 0 .../system_poller_chained_actions.json | 0 .../system_poller_endpointlist.json | 0 .../system_poller_matched_filtering.json | 0 .../system_poller_snmp_metrics.json | 38 + test/functional/shared/harness.js | 446 ++++ .../shared/remoteHost/appLXConnector.js | 273 +++ .../shared/remoteHost/f5BigDevice.js | 113 + .../shared/remoteHost/httpConnector.js | 274 +++ .../shared/remoteHost/icontrolAPI.js | 168 ++ .../shared/remoteHost/icontrolConnector.js | 326 +++ test/functional/shared/remoteHost/index.js | 17 + .../shared/remoteHost/remoteDevice.js | 152 ++ .../shared/remoteHost/remoteHost.js | 41 + .../shared/remoteHost/sshConnector.js | 851 +++++++ .../shared/remoteHost/tcpConnector.js | 522 ++++ .../shared/remoteHost/udpConnector.js | 336 +++ test/functional/shared/testUtils/index.js | 292 +++ test/functional/shared/util.js | 538 ----- test/functional/shared/utils/logger.js | 59 + 
test/functional/shared/utils/misc.js | 221 ++ test/functional/shared/utils/promise.js | 306 +++ test/functional/shared/utils/request.js | 251 ++ test/functional/testRunner.js | 81 +- .../azureLogAnalyticsConsumerTests.js | 18 + test/unit/consumers/azureUtilTests.js | 327 ++- .../azureLogAnalyticsConsumerTestsData.js | 2097 +++++++++++++++++ test/unit/consumers/statsdConsumerTests.js | 4 +- .../normalizeDeclarationEndpointsTestsData.js | 63 +- ...rmalizeDeclarationSystemPollerTestsData.js | 20 +- .../normalizeDeclarationSystemTestsData.js | 3 +- test/unit/data/customEndpointsTestsData.js | 57 +- test/unit/declarationTests.js | 120 +- test/unit/utils/metricsTests.js | 115 + test/unit/utils/normalizeTests.js | 43 +- test/winstonLogger.js | 53 +- versions.json | 2 +- 102 files changed, 16201 insertions(+), 4129 deletions(-) delete mode 100644 scripts/build/publishRpm.sh create mode 100644 src/lib/utils/metrics.js create mode 100644 src/schema/1.30.0/actions_schema.json create mode 100644 src/schema/1.30.0/base_schema.json create mode 100644 src/schema/1.30.0/consumer_schema.json create mode 100644 src/schema/1.30.0/controls_schema.json create mode 100644 src/schema/1.30.0/endpoints_schema.json create mode 100644 src/schema/1.30.0/ihealth_poller_schema.json create mode 100644 src/schema/1.30.0/listener_schema.json create mode 100644 src/schema/1.30.0/namespace_schema.json create mode 100644 src/schema/1.30.0/pull_consumer_schema.json create mode 100644 src/schema/1.30.0/shared_schema.json create mode 100644 src/schema/1.30.0/system_poller_schema.json create mode 100644 src/schema/1.30.0/system_schema.json create mode 100644 test/functional/consumersTests/defaultPullConsumer.js create mode 100644 test/functional/consumersTests/prometheusTests.js delete mode 100644 test/functional/pullConsumerSystemTests.js delete mode 100644 test/functional/pullConsumersTests/defaultTests.js delete mode 100644 test/functional/pullConsumersTests/prometheusTests.js delete mode 100644 
test/functional/shared/azureUtil.js create mode 100644 test/functional/shared/cloudUtils/aws.js create mode 100644 test/functional/shared/cloudUtils/azure.js create mode 100644 test/functional/shared/cloudUtils/gcp.js create mode 100644 test/functional/shared/connectors/dockerConnector.js create mode 100644 test/functional/shared/connectors/telemetryConnector.js rename test/functional/shared/{ => data/declarations}/basic.json (100%) rename test/functional/shared/{ => data/declarations}/basic_namespace.json (100%) rename test/functional/shared/{ => data/declarations}/filter_system_poller.json (100%) rename test/functional/shared/{ => data/declarations}/pull_consumer_basic.json (100%) rename test/functional/shared/{ => data/declarations}/pull_consumer_with_namespace.json (100%) rename test/functional/shared/{ => data/declarations}/system_poller_chained_actions.json (100%) rename test/functional/shared/{ => data/declarations}/system_poller_endpointlist.json (100%) rename test/functional/shared/{ => data/declarations}/system_poller_matched_filtering.json (100%) create mode 100644 test/functional/shared/data/declarations/system_poller_snmp_metrics.json create mode 100644 test/functional/shared/harness.js create mode 100644 test/functional/shared/remoteHost/appLXConnector.js create mode 100644 test/functional/shared/remoteHost/f5BigDevice.js create mode 100644 test/functional/shared/remoteHost/httpConnector.js create mode 100644 test/functional/shared/remoteHost/icontrolAPI.js create mode 100644 test/functional/shared/remoteHost/icontrolConnector.js create mode 100644 test/functional/shared/remoteHost/index.js create mode 100644 test/functional/shared/remoteHost/remoteDevice.js create mode 100644 test/functional/shared/remoteHost/remoteHost.js create mode 100644 test/functional/shared/remoteHost/sshConnector.js create mode 100644 test/functional/shared/remoteHost/tcpConnector.js create mode 100644 test/functional/shared/remoteHost/udpConnector.js create mode 100644 
test/functional/shared/testUtils/index.js delete mode 100644 test/functional/shared/util.js create mode 100644 test/functional/shared/utils/logger.js create mode 100644 test/functional/shared/utils/misc.js create mode 100644 test/functional/shared/utils/promise.js create mode 100644 test/functional/shared/utils/request.js create mode 100644 test/unit/utils/metricsTests.js diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b9e260c6..166f96a2 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,7 @@ include: ############################################################## # # -# Jobs and commands templates # +# Jobs and commands templates # # # ############################################################## .install_unittest_packages_cmd: &install_unittest_packages_cmd diff --git a/CHANGELOG.md b/CHANGELOG.md index db11ad03..06f5d9cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog Changes to this project are documented in this file. More detail and links can be found in the Telemetry Streaming [Document Revision History](https://clouddocs.f5.com/products/extensions/f5-telemetry-streaming/latest/revision-history.html). +## 1.30.0 +### Added +### Fixed +- TS-668: [GitHub #207](https://github.com/F5Networks/f5-telemetry-streaming/issues/207) Add support for numerical enums, enhance SNMP metrics processing +- TS-666: Created separate Azure Logs tables for pool members and removed that data from the pool tables. 
+### Changed +- TS-658: Update npm packages (@grpc/proto-loader from 0.6.9 to 0.6.12) +### Removed + ## 1.29.0 ### Added - TS-568: Added support for SNMP endpoints in Telemetry_Endpoints diff --git a/SUPPORT.md b/SUPPORT.md index 14812918..4f3a91c6 100644 --- a/SUPPORT.md +++ b/SUPPORT.md @@ -17,11 +17,9 @@ Currently supported versions: | Software Version | Release Type | First Customer Ship | End of Support | |------------------|---------------|---------------------|-----------------| -| TS 1.20.1 | LTS | 30-Jun-2021 | 30-Jun-2022 | -| TS 1.27.0 | Feature | 08-Mar-2022 | 08-Jun-2022 | -| TS 1.27.1 | LTS | 19-Apr-2022 | 19-Apr-2023 | | TS 1.28.0 | Feature | 19-Apr-2022 | 19-Jul-2022 | | TS 1.29.0 | Feature | 31-May-2022 | 31-Aug-2022 | +| TS 1.30.0 | Feature | 15-Jul-2022 | 15-Oct-2022 | Versions no longer supported: @@ -47,11 +45,14 @@ Versions no longer supported: | TS 1.18.0 | Feature | 23-Feb-2021 | 23-May-2021 | | TS 1.19.0 | Feature | 06-Apr-2021 | 06-Jul-2021 | | TS 1.20.0 | Feature | 18-May-2021 | 18-Aug-2021 | +| TS 1.20.1 | LTS | 30-Jun-2021 | 30-Jun-2022 | | TS 1.21.0 | Feature | 28-Jun-2021 | 28-Sep-2021 | | TS 1.22.0 | Feature | 09-Aug-2021 | 09-Nov-2021 | | TS 1.23.0 | Feature | 21-Sep-2021 | 21-Dec-2021 | | TS 1.24.0 | Feature | 02-Nov-2021 | 02-Feb-2022 | | TS 1.25.0 | Feature | 14-Dec-2021 | 14-Mar-2022 | | TS 1.26.0 | Feature | 25-Jan-2022 | 25-Apr-2022 | +| TS 1.27.0 | Feature | 08-Mar-2022 | 08-Jun-2022 | +| TS 1.27.1 | LTS | 19-Apr-2022 | 19-Apr-2023 | See the [Release notes](https://github.com/F5Networks/f5-telemetry-streaming/releases) and [Telemetry Streaming documentation](https://clouddocs.f5.com/products/extensions/f5-telemetry-streaming/latest/revision-history.html) for new features and issues resolved for each release. 
diff --git a/contributing/README.md b/contributing/README.md index 110c45a0..263ec792 100644 --- a/contributing/README.md +++ b/contributing/README.md @@ -108,7 +108,7 @@ How does the project handle a typical `POST` request? "trace": false, "format": "default" }, - "schemaVersion": "1.29.0" + "schemaVersion": "1.30.0" } } ``` diff --git a/contributing/process_release.md b/contributing/process_release.md index 3bb14f0e..73c7097e 100644 --- a/contributing/process_release.md +++ b/contributing/process_release.md @@ -65,6 +65,7 @@ * 1.27.0 - 17.7 MB * 1.28.0 - 17.7 MB * 1.29.0 - 17.8 MB + * 1.30.0 - 16.0 MB * Install build to BIG-IP, navigate to folder `/var/config/rest/iapps/f5-telemetry/` and check following: * Run `du -sh` and check that folder's size (shouldn't be much greater than previous versions): * 1.4.0 - 65 MB @@ -93,6 +94,7 @@ * 1.27.0 - 127 MB * 1.28.0 - 127 MB * 1.29.0 - 129 MB + * 1.30.0 - 116 MB * Check `node_modules` folder - if you see `eslint`, `mocha` or something else from [package.json](package.json) `devDependencies` section - something wrong with build process. Probably some `npm` flags are work as not expected and it MUST BE FIXED before publishing. * Ensure that all tests (unit tests and functional tests passed) * Optional: Ensure that your local tags match remote. If not, remove all and re-fetch: diff --git a/docs/conf.py b/docs/conf.py index 75ef95ac..9692c002 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -79,7 +79,7 @@ # The short X.Y version. version = u'' # The full version, including alpha/beta/rc tags. -release = u'1.29.0' +release = u'1.30.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/docs/revision-history.rst b/docs/revision-history.rst index fd87e3b4..ee21b899 100644 --- a/docs/revision-history.rst +++ b/docs/revision-history.rst @@ -12,7 +12,7 @@ Document Revision History - Date * - 1.29.0 - - **Updated the documentation for Telemetry Streaming v1.29.0. This release contains the following changes:** |br| * Added support for querying SNMP using a custom endpoint (see :ref:`SNMP example`) |br| * Added outputMode parameter to Generic HTTP consumer to support raw data output (see :ref:`Generic HTTP`) |br| |br| Issues Resolved: |br| * Metric Consumers should not fail when 'null' found in data |br| * Prometheus consumer should ignore NaN values + - Updated the documentation for Telemetry Streaming v1.29.0. This release contains the following changes: |br| * Added support for querying SNMP using a custom endpoint (see :ref:`SNMP example`) |br| * Added outputMode parameter to Generic HTTP consumer to support raw data output (see :ref:`Generic HTTP`) |br| |br| Issues Resolved: |br| * Metric Consumers should not fail when 'null' found in data |br| * Prometheus consumer should ignore NaN values - 5-31-22 * - 1.28.0 diff --git a/docs/setting-up-consumer.rst b/docs/setting-up-consumer.rst index b80a1785..04426a35 100644 --- a/docs/setting-up-consumer.rst +++ b/docs/setting-up-consumer.rst @@ -114,8 +114,8 @@ Microsoft Azure Log Analytics |azure_img| Required Information: - - Workspace ID: Navigate to :guilabel:`Log Analytics workspace > Advanced Settings > Connected Sources`. - - Shared Key: Navigate to :guilabel:`Log Analytics workspace > Advanced Settings > Connected Sources` and use the primary key. + - Workspace ID: Navigate to :guilabel:`Log Analytics workspace > [your workspace] > Agents Management > Workspace ID`. + - Shared Key: Navigate to :guilabel:`Log Analytics workspace > [your workspace] > Agents Management > Primary key`. .. IMPORTANT:: The Azure Log Analytics Consumer only supports sending 500 items. 
Each configuration item (such as virtual server, pool, node) uses part of this limit. diff --git a/examples/declarations/system_custom_endpoints_with_snmp.json b/examples/declarations/system_custom_endpoints_with_snmp.json index 57deeb16..0102aa81 100644 --- a/examples/declarations/system_custom_endpoints_with_snmp.json +++ b/examples/declarations/system_custom_endpoints_with_snmp.json @@ -11,6 +11,11 @@ "totalMemory": { "protocol": "snmp", "path": "sysGlobalStat.sysStatMemoryTotal" + }, + "hrFSBootable": { + "protocol": "snmp", + "path": "hrFSBootable.1", + "numericalEnums": true } } }, diff --git a/package-lock.json b/package-lock.json index 3cc75adf..56fc7303 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "f5-telemetry", - "version": "1.29.0-1", + "version": "1.30.0-1", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -46,7 +46,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", "integrity": "sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg==", - "optional": true, "requires": { "@babel/highlight": "^7.14.5" } @@ -55,7 +54,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.15.8.tgz", "integrity": "sha512-ECmAKstXbp1cvpTTZciZCgfOt6iN64lR0d+euv3UZisU5awfRawOvg07Utn/qBGuH4bRIEZKrA/4LzZyXhZr8g==", - "optional": true, "requires": { "@babel/types": "^7.15.6", "jsesc": "^2.5.1", @@ -94,14 +92,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/highlight": { "version": "7.14.5", "resolved": 
"https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", @@ -111,14 +107,12 @@ "@babel/parser": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.15.8.tgz", - "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==", - "optional": true + "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==" }, "@babel/template": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", "integrity": "sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/parser": "^7.15.4", @@ -146,7 +140,6 @@ "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -184,7 +177,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.15.4.tgz", "integrity": "sha512-QwrtdNvUNsPCj2lfNQacsGSQvGX8ee1ttrBrcozUP2Sv/jylewBP/8QFe6ZkBsC8T/GYWonNAWJV4aRR9AL2DA==", - "optional": true, "requires": { "@babel/types": "^7.15.4" }, @@ -192,14 +184,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true 
+ "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/types": { "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -231,7 +221,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.15.4.tgz", "integrity": "sha512-7ZmzFi+DwJx6A7mHRwbuucEYpyBwmh2Ca0RvI6z2+WLZYCqV0JOaLb+u0zbtmDicebgKBZgqbYfLaKNqSgv5Pw==", - "optional": true, "requires": { "@babel/helper-annotate-as-pure": "^7.15.4", "@babel/helper-function-name": "^7.15.4", @@ -245,7 +234,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", "integrity": "sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg==", - "optional": true, "requires": { "@babel/highlight": "^7.14.5" } @@ -254,7 +242,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.15.4.tgz", "integrity": "sha512-Z91cOMM4DseLIGOnog+Z8OI6YseR9bua+HpvLAQ2XayUGU+neTtX+97caALaLdyu53I/fjhbeCnWnRH1O3jFOw==", - "optional": true, "requires": { "@babel/helper-get-function-arity": "^7.15.4", "@babel/template": "^7.15.4", @@ -265,7 +252,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.15.4.tgz", "integrity": "sha512-1/AlxSF92CmGZzHnC515hm4SirTxtpDnLEJ0UyEMgTMZN+6bxXKg04dKhiRx5Enel+SUA1G1t5Ed/yQia0efrA==", - "optional": true, "requires": { "@babel/types": "^7.15.4" } @@ -274,7 +260,6 @@ "version": "7.15.4", "resolved": 
"https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.15.4.tgz", "integrity": "sha512-HsFqhLDZ08DxCpBdEVtKmywj6PQbwnF6HHybur0MAnkAKnlS6uHkwnmRIkElB2Owpfb4xL4NwDmDLFubueDXsw==", - "optional": true, "requires": { "@babel/types": "^7.15.4" } @@ -282,14 +267,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/highlight": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", @@ -299,14 +282,12 @@ "@babel/parser": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.15.8.tgz", - "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==", - "optional": true + "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==" }, "@babel/template": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", "integrity": "sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/parser": "^7.15.4", @@ -317,7 +298,6 @@ "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": 
"sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -349,7 +329,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.15.4.tgz", "integrity": "sha512-VTy085egb3jUGVK9ycIxQiPbquesq0HUQ+tPO0uv5mPEBZipk+5FkRKiWq5apuyTE9FUrjENB0rCf8y+n+UuhA==", - "optional": true, "requires": { "@babel/types": "^7.15.4" }, @@ -357,14 +336,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/types": { "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -376,7 +353,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.15.4.tgz", "integrity": "sha512-cokOMkxC/BTyNP1AlY25HuBWM32iCEsLPI4BHDpJCHHm1FU2E7dKWWIXJgQgSFiu4lp8q3bL1BIKwqkSUviqtA==", - "optional": true, "requires": { "@babel/types": "^7.15.4" }, @@ -384,14 +360,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - 
"optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/types": { "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -446,7 +420,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", "integrity": "sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg==", - "optional": true, "requires": { "@babel/highlight": "^7.14.5" } @@ -486,7 +459,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.15.4.tgz", "integrity": "sha512-HsFqhLDZ08DxCpBdEVtKmywj6PQbwnF6HHybur0MAnkAKnlS6uHkwnmRIkElB2Owpfb4xL4NwDmDLFubueDXsw==", - "optional": true, "requires": { "@babel/types": "^7.15.4" } @@ -494,14 +466,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/highlight": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", @@ -511,14 +481,12 @@ "@babel/parser": { "version": "7.15.8", "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.15.8.tgz", - "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==", - "optional": true + "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==" }, "@babel/template": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", "integrity": "sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/parser": "^7.15.4", @@ -546,7 +514,6 @@ "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -558,7 +525,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.15.4.tgz", "integrity": "sha512-E/z9rfbAOt1vDW1DR7k4SzhzotVV5+qMciWV6LaG1g4jeFrkDlJedjtV4h0i4Q/ITnUu+Pk08M7fczsB9GXBDw==", - "optional": true, "requires": { "@babel/types": "^7.15.4" }, @@ -566,14 +532,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/types": { "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", 
- "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -584,14 +548,12 @@ "@babel/helper-plugin-utils": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz", - "integrity": "sha512-/37qQCE3K0vvZKwoK4XU/irIJQdIfCJuhU5eKnNxpFDsOkgFaUAwbv+RYw6eYgsC0E4hS7r5KqGULUogqui0fQ==", - "optional": true + "integrity": "sha512-/37qQCE3K0vvZKwoK4XU/irIJQdIfCJuhU5eKnNxpFDsOkgFaUAwbv+RYw6eYgsC0E4hS7r5KqGULUogqui0fQ==" }, "@babel/helper-replace-supers": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.15.4.tgz", "integrity": "sha512-/ztT6khaXF37MS47fufrKvIsiQkx1LBRvSJNzRqmbyeZnTwU9qBxXYLaaT/6KaxfKhjs2Wy8kG8ZdsFUuWBjzw==", - "optional": true, "requires": { "@babel/helper-member-expression-to-functions": "^7.15.4", "@babel/helper-optimise-call-expression": "^7.15.4", @@ -603,7 +565,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", "integrity": "sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg==", - "optional": true, "requires": { "@babel/highlight": "^7.14.5" } @@ -612,7 +573,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.15.8.tgz", "integrity": "sha512-ECmAKstXbp1cvpTTZciZCgfOt6iN64lR0d+euv3UZisU5awfRawOvg07Utn/qBGuH4bRIEZKrA/4LzZyXhZr8g==", - "optional": true, "requires": { "@babel/types": "^7.15.6", "jsesc": "^2.5.1", @@ -623,7 +583,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.15.4.tgz", "integrity": "sha512-Z91cOMM4DseLIGOnog+Z8OI6YseR9bua+HpvLAQ2XayUGU+neTtX+97caALaLdyu53I/fjhbeCnWnRH1O3jFOw==", - "optional": true, "requires": { "@babel/helper-get-function-arity": "^7.15.4", "@babel/template": "^7.15.4", @@ -634,7 +593,6 @@ "version": "7.15.4", "resolved": 
"https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.15.4.tgz", "integrity": "sha512-1/AlxSF92CmGZzHnC515hm4SirTxtpDnLEJ0UyEMgTMZN+6bxXKg04dKhiRx5Enel+SUA1G1t5Ed/yQia0efrA==", - "optional": true, "requires": { "@babel/types": "^7.15.4" } @@ -643,7 +601,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.15.4.tgz", "integrity": "sha512-HsFqhLDZ08DxCpBdEVtKmywj6PQbwnF6HHybur0MAnkAKnlS6uHkwnmRIkElB2Owpfb4xL4NwDmDLFubueDXsw==", - "optional": true, "requires": { "@babel/types": "^7.15.4" } @@ -651,14 +608,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/highlight": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", @@ -668,14 +623,12 @@ "@babel/parser": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.15.8.tgz", - "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==", - "optional": true + "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==" }, "@babel/template": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", "integrity": 
"sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/parser": "^7.15.4", @@ -686,7 +639,6 @@ "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.15.4.tgz", "integrity": "sha512-W6lQD8l4rUbQR/vYgSuCAE75ADyyQvOpFVsvPPdkhf6lATXAsQIG9YdtOcu8BB1dZ0LKu+Zo3c1wEcbKeuhdlA==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/generator": "^7.15.4", @@ -703,7 +655,6 @@ "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -756,8 +707,7 @@ "@babel/helper-validator-option": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.14.5.tgz", - "integrity": "sha512-OX8D5eeX4XwcroVW45NMvoYaIuFI+GQpA2a8Gi+X/U/cDUIRsV37qQfF905F0htTRCREQIB4KqPeaveRJUl3Ow==", - "optional": true + "integrity": "sha512-OX8D5eeX4XwcroVW45NMvoYaIuFI+GQpA2a8Gi+X/U/cDUIRsV37qQfF905F0htTRCREQIB4KqPeaveRJUl3Ow==" }, "@babel/helpers": { "version": "7.15.4", @@ -774,7 +724,6 @@ "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", "integrity": "sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg==", - "optional": true, "requires": { "@babel/highlight": "^7.14.5" } @@ -822,14 +771,12 @@ "@babel/helper-validator-identifier": { "version": "7.15.7", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz", - "integrity": "sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==", - "optional": true + "integrity": 
"sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w==" }, "@babel/highlight": { "version": "7.14.5", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.5", "chalk": "^2.0.0", @@ -839,14 +786,12 @@ "@babel/parser": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.15.8.tgz", - "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==", - "optional": true + "integrity": "sha512-BRYa3wcQnjS/nqI8Ac94pYYpJfojHVvVXJ97+IDCImX4Jc8W8Xv1+47enbruk+q1etOpsQNwnfFcNGw+gtPGxA==" }, "@babel/template": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", "integrity": "sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", - "optional": true, "requires": { "@babel/code-frame": "^7.14.5", "@babel/parser": "^7.15.4", @@ -874,7 +819,6 @@ "version": "7.15.6", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", - "optional": true, "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -1008,19 +952,19 @@ } }, "@eslint/eslintrc": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", - "integrity": "sha512-bxvbYnBPN1Gibwyp6NrpnFzA3YtRL3BBAyEAFVIpNTm2Rn4Vy87GA5M4aSn3InRrlsbX5N0GW7XIx+U4SAEKdQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz", + "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==", "dev": true, 
"requires": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.3.1", - "globals": "^13.9.0", + "espree": "^9.3.2", + "globals": "^13.15.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", + "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" }, "dependencies": { @@ -1040,9 +984,9 @@ } }, "globals": { - "version": "13.13.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", - "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", + "version": "13.15.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.15.0.tgz", + "integrity": "sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -1057,6 +1001,15 @@ "argparse": "^2.0.1" } }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, "strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -1116,9 +1069,9 @@ } }, "@grpc/proto-loader": { - "version": "0.6.9", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.6.9.tgz", - "integrity": "sha512-UlcCS8VbsU9d3XTXGiEVFonN7hXk+oMXZtoHHG2oSA1/GcDP1q6OUgs20PzHDGizzyi8ufGSUDlk3O2NyY7leg==", + "version": "0.6.12", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.6.12.tgz", + "integrity": "sha512-filTVbETFnxb9CyRX98zN18ilChTuf/C5scZ2xyaOTp0EHGq0/ufX8rjqXUcSb1Gpv7eZq4M2jDvbh9BogKnrg==", "requires": { "@types/long": "^4.0.1", "lodash.camelcase": "^4.3.0", @@ -1245,9 +1198,22 @@ "dev": true }, "@opentelemetry/api": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.4.tgz", - "integrity": "sha512-BuJuXRSJNQ3QoKA6GWWDyuLpOUck+9hAXNMCnrloc1aWVoy6Xq6t9PUV08aBZ4Lutqq2LEHM486bpZqoViScog==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.1.0.tgz", + "integrity": "sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==" + }, + "@opentelemetry/api-metrics": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-metrics/-/api-metrics-0.27.0.tgz", + "integrity": "sha512-tB79288bwjkdhPNpw4UdOEy3bacVwtol6Que7cAu8KEJ9ULjRfSiwpYEwJY/oER3xZ7zNFz0uiJ7N1jSiotpVA==" + }, + "@opentelemetry/core": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", + "integrity": "sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", + "requires": { + "@opentelemetry/semantic-conventions": "1.0.1" + } }, "@opentelemetry/exporter-metrics-otlp-http": { "version": "0.27.0", @@ -1259,35 +1225,6 @@ "@opentelemetry/exporter-trace-otlp-http": "0.27.0", "@opentelemetry/resources": "1.0.1", "@opentelemetry/sdk-metrics-base": "0.27.0" - }, - "dependencies": { - "@opentelemetry/api-metrics": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api-metrics/-/api-metrics-0.27.0.tgz", - "integrity": "sha512-tB79288bwjkdhPNpw4UdOEy3bacVwtol6Que7cAu8KEJ9ULjRfSiwpYEwJY/oER3xZ7zNFz0uiJ7N1jSiotpVA==" - }, - "@opentelemetry/core": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", - "integrity": "sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", - "requires": { - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/resources": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", - "integrity": 
"sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/semantic-conventions": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", - "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" - } } }, "@opentelemetry/exporter-metrics-otlp-proto": { @@ -1303,30 +1240,6 @@ "@opentelemetry/resources": "1.0.1", "@opentelemetry/sdk-metrics-base": "0.27.0", "protobufjs": "^6.9.0" - }, - "dependencies": { - "@opentelemetry/core": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", - "integrity": "sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", - "requires": { - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/resources": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", - "integrity": "sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/semantic-conventions": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", - "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" - } } }, "@opentelemetry/exporter-trace-otlp-http": { @@ -1337,40 +1250,6 @@ "@opentelemetry/core": "1.0.1", "@opentelemetry/resources": "1.0.1", "@opentelemetry/sdk-trace-base": "1.0.1" - }, - "dependencies": { - "@opentelemetry/core": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", - "integrity": "sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", - "requires": { - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/resources": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", - "integrity": "sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/sdk-trace-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.0.1.tgz", - "integrity": "sha512-JVSAepTpW7dnqfV7XFN0zHj1jXGNd5OcvIGQl76buogqffdgJdgJWQNrOuUJaus56zrOtlzqFH+YtMA9RGEg8w==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/resources": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/semantic-conventions": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", - "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" - } } }, "@opentelemetry/exporter-trace-otlp-proto": { @@ -1384,40 +1263,15 @@ "@opentelemetry/resources": "1.0.1", "@opentelemetry/sdk-trace-base": "1.0.1", "protobufjs": "^6.9.0" - }, - "dependencies": { - "@opentelemetry/core": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", - "integrity": "sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", - "requires": { - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/resources": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", - "integrity": "sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/sdk-trace-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.0.1.tgz", - "integrity": "sha512-JVSAepTpW7dnqfV7XFN0zHj1jXGNd5OcvIGQl76buogqffdgJdgJWQNrOuUJaus56zrOtlzqFH+YtMA9RGEg8w==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/resources": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/semantic-conventions": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", - "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" - } + } + }, + "@opentelemetry/resources": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", + "integrity": "sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", + "requires": { + "@opentelemetry/core": "1.0.1", + "@opentelemetry/semantic-conventions": "1.0.1" } }, "@opentelemetry/sdk-metrics-base": { @@ -1429,41 +1283,27 @@ "@opentelemetry/core": "1.0.1", "@opentelemetry/resources": "1.0.1", "lodash.merge": "^4.6.2" - }, - "dependencies": { - "@opentelemetry/api-metrics": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api-metrics/-/api-metrics-0.27.0.tgz", - "integrity": "sha512-tB79288bwjkdhPNpw4UdOEy3bacVwtol6Que7cAu8KEJ9ULjRfSiwpYEwJY/oER3xZ7zNFz0uiJ7N1jSiotpVA==" - }, - "@opentelemetry/core": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.0.1.tgz", - "integrity": 
"sha512-90nQ2X6b/8X+xjcLDBYKooAcOsIlwLRYm+1VsxcX5cHl6V4CSVmDpBreQSDH/A21SqROzapk6813008SatmPpQ==", - "requires": { - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/resources": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.0.1.tgz", - "integrity": "sha512-p8DevOaAEepPucUtImR4cZKHOE2L1jgQAtkdZporV+XnxPA/HqCHPEESyUVuo4f5M0NUlL6k5Pba75KwNJlTRg==", - "requires": { - "@opentelemetry/core": "1.0.1", - "@opentelemetry/semantic-conventions": "1.0.1" - } - }, - "@opentelemetry/semantic-conventions": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", - "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" - } } }, + "@opentelemetry/sdk-trace-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.0.1.tgz", + "integrity": "sha512-JVSAepTpW7dnqfV7XFN0zHj1jXGNd5OcvIGQl76buogqffdgJdgJWQNrOuUJaus56zrOtlzqFH+YtMA9RGEg8w==", + "requires": { + "@opentelemetry/core": "1.0.1", + "@opentelemetry/resources": "1.0.1", + "@opentelemetry/semantic-conventions": "1.0.1" + } + }, + "@opentelemetry/semantic-conventions": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.0.1.tgz", + "integrity": "sha512-7XU1sfQ8uCVcXLxtAHA8r3qaLJ2oq7sKtEwzZhzuEXqYmjW+n+J4yM3kNo0HQo3Xp1eUe47UM6Wy6yuAvIyllg==" + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" }, "@protobufjs/base64": { "version": "1.1.2", @@ -1478,12 +1318,12 @@ "@protobufjs/eventemitter": { "version": "1.1.0", "resolved": 
"https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" }, "@protobufjs/fetch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", "requires": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" @@ -1492,27 +1332,27 @@ "@protobufjs/float": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" }, "@protobufjs/inquire": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" }, "@protobufjs/path": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" }, "@protobufjs/pool": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" }, "@protobufjs/utf8": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + "integrity": 
"sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, "@sinonjs/commons": { "version": "1.8.3", @@ -1554,7 +1394,6 @@ "version": "5.4.1", "resolved": "https://registry.npmjs.org/@stryker-mutator/api/-/api-5.4.1.tgz", "integrity": "sha512-NWO2YvNGjXvZ6yvcpWCDCWRpFjFKUUInUNqnD1rtD4cOnqWX458ViHeHhNsEQ1b5c22zDw/MedAbUwkvudXiWg==", - "optional": true, "requires": { "mutation-testing-metrics": "1.7.5", "mutation-testing-report-schema": "1.7.4", @@ -1564,8 +1403,7 @@ "tslib": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", - "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==", - "optional": true + "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" } } }, @@ -1629,14 +1467,12 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "optional": true + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "optional": true, "requires": { "color-convert": "^2.0.1" } @@ -1645,7 +1481,6 @@ "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "optional": true, "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -1670,7 +1505,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "optional": true, "requires": { "color-name": "~1.1.4" } @@ -1678,8 +1512,7 @@ "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "optional": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "commander": { "version": "8.1.0", @@ -1706,7 +1539,6 @@ "version": "7.2.0", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", - "optional": true, "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -1719,8 +1551,7 @@ "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "optional": true + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "inquirer": { "version": "8.1.5", @@ -1806,7 +1637,6 @@ "version": "7.3.1", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.3.1.tgz", "integrity": "sha512-vNenx7gqjPyeKpRnM6S5Ksm/oFTRijWWzYlRON04KaehZ3YjDwEmVjGUGo0TKWVjeNXOujVRlh0K1drUbcdPkw==", - "optional": true, "requires": { "tslib": "~2.1.0" }, @@ -1814,8 +1644,7 @@ "tslib": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.1.0.tgz", - "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==", - "optional": true + "integrity": "sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==" } } }, @@ -1849,7 +1678,6 @@ "version": "6.0.1", "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "optional": true, "requires": { "ansi-regex": "^5.0.1" } @@ -1858,7 +1686,6 @@ "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "optional": true, "requires": { "has-flag": "^4.0.0" } @@ -1963,7 +1790,6 @@ "version": "5.4.1", "resolved": "https://registry.npmjs.org/@stryker-mutator/util/-/util-5.4.1.tgz", "integrity": "sha512-G0IaLUO15Rk7otvSz8/ayAuUW9AvGRxQZNZnNut44YKR0J1dk3rI1sFhQwaAh3gKFElm6FntToDoChI4eGZElg==", - "optional": true, "requires": { "lodash.flatmap": "~4.5.0" } @@ -1981,7 +1807,7 @@ "@types/json5": { "version": "0.0.29", "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, "@types/lodash": { @@ -2016,9 +1842,9 @@ } }, "acorn": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", - "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "dev": true }, "acorn-jsx": { @@ -2145,52 +1971,83 @@ "dev": true }, "array-includes": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.4.tgz", - "integrity": "sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw==", + "version": "3.1.5", + "resolved": 
"https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz", + "integrity": "sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==", "dev": true, "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5", "get-intrinsic": "^1.1.1", "is-string": "^1.0.7" }, "dependencies": { + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + }, "es-abstract": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", - "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.1.tgz", + "integrity": "sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==", "dev": true, "requires": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", + "function.prototype.name": "^1.1.5", "get-intrinsic": "^1.1.1", "get-symbol-description": "^1.0.0", "has": "^1.0.3", - "has-symbols": "^1.0.2", + "has-property-descriptors": "^1.0.0", + "has-symbols": "^1.0.3", "internal-slot": "^1.0.3", "is-callable": "^1.2.4", - "is-negative-zero": "^2.0.1", + "is-negative-zero": "^2.0.2", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.1", + "is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", - "is-weakref": "^1.0.1", - "object-inspect": "^1.11.0", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.0", "object-keys": "^1.1.1", "object.assign": "^4.1.2", - "string.prototype.trimend": 
"^1.0.4", - "string.prototype.trimstart": "^1.0.4", - "unbox-primitive": "^1.0.1" + "regexp.prototype.flags": "^1.4.3", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" } }, + "has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true + }, "is-callable": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", "dev": true }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true + }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -2201,6 +2058,15 @@ "has-tostringtag": "^1.0.0" } }, + "is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "is-string": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", @@ -2210,59 +2076,135 @@ "has-tostringtag": "^1.0.0" } }, + "is-weakref": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", "dev": true + }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, + "string.prototype.trimend": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, + "string.prototype.trimstart": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, + "unbox-primitive": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + } } } }, "array.prototype.flat": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz", - "integrity": "sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz", + "integrity": "sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==", "dev": true, "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.19.0" + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" }, "dependencies": { "es-abstract": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", - "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.1.tgz", + "integrity": "sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==", "dev": true, "requires": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", + "function.prototype.name": "^1.1.5", "get-intrinsic": "^1.1.1", "get-symbol-description": "^1.0.0", "has": "^1.0.3", - "has-symbols": "^1.0.2", + "has-property-descriptors": "^1.0.0", + "has-symbols": "^1.0.3", "internal-slot": "^1.0.3", "is-callable": "^1.2.4", - "is-negative-zero": "^2.0.1", + "is-negative-zero": "^2.0.2", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.1", + 
"is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", - "is-weakref": "^1.0.1", - "object-inspect": "^1.11.0", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.0", "object-keys": "^1.1.1", "object.assign": "^4.1.2", - "string.prototype.trimend": "^1.0.4", - "string.prototype.trimstart": "^1.0.4", - "unbox-primitive": "^1.0.1" + "regexp.prototype.flags": "^1.4.3", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" } }, + "has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true + }, "is-callable": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", "dev": true }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true + }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -2273,6 +2215,15 @@ "has-tostringtag": "^1.0.0" } }, + "is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } 
+ }, "is-string": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", @@ -2282,11 +2233,89 @@ "has-tostringtag": "^1.0.0" } }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", "dev": true + }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, + "string.prototype.trimend": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "dependencies": { + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": 
"sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + } + } + }, + "string.prototype.trimstart": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "dependencies": { + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + } + } + }, + "unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + } } } }, @@ -3057,7 +3086,6 @@ "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "optional": true, "requires": { "once": "^1.4.0" } @@ -3095,6 +3123,15 @@ "unbox-primitive": "^1.0.1" } }, + "es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": 
"sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", @@ -3123,12 +3160,12 @@ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" }, "eslint": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.13.0.tgz", - "integrity": "sha512-D+Xei61eInqauAyTJ6C0q6x9mx7kTUC1KZ0m0LSEexR0V+e94K12LmWX076ZIsldwfQ2RONdaJe0re0TRGQbRQ==", + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.16.0.tgz", + "integrity": "sha512-MBndsoXY/PeVTDJeWsYj7kLZ5hQpJOfMYLsF6LicLHQWbRDG19lK5jOix4DPl8yY4SUFcE3txy86OzFLWT+yoA==", "dev": true, "requires": { - "@eslint/eslintrc": "^1.2.1", + "@eslint/eslintrc": "^1.3.0", "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", "chalk": "^4.0.0", @@ -3139,14 +3176,14 @@ "eslint-scope": "^7.1.1", "eslint-utils": "^3.0.0", "eslint-visitor-keys": "^3.3.0", - "espree": "^9.3.1", + "espree": "^9.3.2", "esquery": "^1.4.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", "glob-parent": "^6.0.1", - "globals": "^13.6.0", + "globals": "^13.15.0", "ignore": "^5.2.0", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", @@ -3155,7 +3192,7 @@ "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.1", "regexpp": "^3.2.0", @@ -3256,9 +3293,9 @@ } }, "globals": { - "version": "13.13.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", - "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", + "version": "13.15.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.15.0.tgz", + 
"integrity": "sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -3279,6 +3316,15 @@ "argparse": "^2.0.1" } }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -3312,14 +3358,23 @@ } }, "eslint-config-airbnb-base": { - "version": "14.2.1", - "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-14.2.1.tgz", - "integrity": "sha512-GOrQyDtVEc1Xy20U7vsB2yAoB4nBlfH5HZJeatRXHleO+OS5Ot+MWij4Dpltw4/DyIkqUfqz1epfhVR5XWWQPA==", + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz", + "integrity": "sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==", "dev": true, "requires": { "confusing-browser-globals": "^1.0.10", "object.assign": "^4.1.2", - "object.entries": "^1.1.2" + "object.entries": "^1.1.5", + "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } } }, "eslint-import-resolver-node": { @@ -3344,9 +3399,9 @@ } }, "eslint-module-utils": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.2.tgz", - "integrity": "sha512-zquepFnWCY2ISMFwD/DqzaM++H+7PDzOpUvotJWm/y1BAFt5R4oeULgdrTejKqLkz7MA/tgstsUMNYc7wNdTrg==", + "version": "2.7.3", + "resolved": 
"https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz", + "integrity": "sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ==", "dev": true, "requires": { "debug": "^3.2.7", @@ -3365,9 +3420,9 @@ } }, "eslint-plugin-import": { - "version": "2.25.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.25.4.tgz", - "integrity": "sha512-/KJBASVFxpu0xg1kIBn9AUa8hQVnszpwgE7Ld0lKAlx7Ie87yzEzCgSkekt+le/YVhiaosO4Y14GDAOc41nfxA==", + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "dev": true, "requires": { "array-includes": "^3.1.4", @@ -3375,14 +3430,14 @@ "debug": "^2.6.9", "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.7.2", + "eslint-module-utils": "^2.7.3", "has": "^1.0.3", - "is-core-module": "^2.8.0", + "is-core-module": "^2.8.1", "is-glob": "^4.0.3", - "minimatch": "^3.0.4", + "minimatch": "^3.1.2", "object.values": "^1.1.5", - "resolve": "^1.20.0", - "tsconfig-paths": "^3.12.0" + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" }, "dependencies": { "debug": { @@ -3395,9 +3450,9 @@ } }, "is-core-module": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", - "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz", + "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==", "dev": true, "requires": { "has": "^1.0.3" @@ -3412,11 +3467,31 @@ "is-extglob": "^2.1.1" } }, + "minimatch": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } } } }, @@ -3454,13 +3529,13 @@ "dev": true }, "espree": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.1.tgz", - "integrity": "sha512-bvdyLmJMfwkV3NCRl5ZhJf22zBFo1y8bYh3VYb+bfzqNB4Je68P2sSuXyuFquzWLebHpNd2/d5uv7yoP9ISnGQ==", + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz", + "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==", "dev": true, "requires": { - "acorn": "^8.7.0", - "acorn-jsx": "^5.3.1", + "acorn": "^8.7.1", + "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.3.0" } }, @@ -3636,7 +3711,7 @@ "fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, "fast-text-encoding": { @@ -3752,7 +3827,7 @@ "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - 
"integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", "dev": true, "requires": { "locate-path": "^2.0.0" @@ -3902,10 +3977,197 @@ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, + "function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" + }, + "dependencies": { + "es-abstract": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.1.tgz", + "integrity": "sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.1.1", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "is-callable": "^1.2.4", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "regexp.prototype.flags": "^1.4.3", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" + } + }, + "has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + 
"integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true + }, + "is-callable": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", + "dev": true + }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "dev": true + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": 
"sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "dev": true + }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, + "string.prototype.trimend": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "dependencies": { + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + } + } + }, + "string.prototype.trimstart": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": 
"^1.19.5" + }, + "dependencies": { + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + } + } + }, + "unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + } + } + } + }, "functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "dev": true + }, + "functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true }, "gauge": { @@ -4180,6 +4442,15 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" }, + "has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "dev": true, + "requires": { + "get-intrinsic": "^1.1.1" + } + }, "has-symbols": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", @@ -4507,8 +4778,7 @@ "is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "optional": true + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==" }, "is-weakref": { "version": "1.0.1", @@ -4687,7 +4957,7 @@ "json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, "json-stringify-safe": { @@ -4857,7 +5127,7 @@ "locate-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", "dev": true, "requires": { "p-locate": "^2.0.0", @@ -4883,8 +5153,7 @@ "lodash.flatmap": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.flatmap/-/lodash.flatmap-4.5.0.tgz", - "integrity": "sha1-74y/QI9uSCaGYzRTBcaswLd4cC4=", - "optional": true + "integrity": "sha1-74y/QI9uSCaGYzRTBcaswLd4cC4=" }, "lodash.flattendeep": { "version": "4.4.0", @@ -5015,9 +5284,9 @@ } }, "memfs": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.1.tgz", - "integrity": "sha512-1c9VPVvW5P7I85c35zAdEr1TD5+F11IToIHIlrVIcflfnzPkJa0ZoYEoEdYDP8KgPFoSZ/opDrUsAoZWym3mtw==", + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.4.tgz", + "integrity": 
"sha512-W4gHNUE++1oSJVn8Y68jPXi+mkx3fXR5ITE/Ubz6EQ3xRpCN5k2CQ4AUR8094Z7211F876TyoBACGsIveqgiGA==", "dev": true, "requires": { "fs-monkey": "1.0.3" @@ -5360,7 +5629,6 @@ "version": "1.7.5", "resolved": "https://registry.npmjs.org/mutation-testing-metrics/-/mutation-testing-metrics-1.7.5.tgz", "integrity": "sha512-BkXuzaMHzP3V+1QlScJ0es13PWEIXsc48t8/OMuCB/RDyCKKblZNlGb7KpY4oDgU0VIFMR6sBJ4F3IFkY6Elnw==", - "optional": true, "requires": { "mutation-testing-report-schema": "1.7.4" } @@ -5368,8 +5636,7 @@ "mutation-testing-report-schema": { "version": "1.7.4", "resolved": "https://registry.npmjs.org/mutation-testing-report-schema/-/mutation-testing-report-schema-1.7.4.tgz", - "integrity": "sha512-69CxAaIBprkxvHkZ/1zDJesFOxiXAKUpOeK6xUHAmfqMW3zYfb+nPae40GwTQt9WFFCHj56O6d6GJzR7Qm2ZwQ==", - "optional": true + "integrity": "sha512-69CxAaIBprkxvHkZ/1zDJesFOxiXAKUpOeK6xUHAmfqMW3zYfb+nPae40GwTQt9WFFCHj56O6d6GJzR7Qm2ZwQ==" }, "nan": { "version": "2.14.2", @@ -5385,7 +5652,7 @@ "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, "needle": { @@ -5544,15 +5811,6 @@ "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", "optional": true }, - "node-scp": { - "version": "0.0.14", - "resolved": "https://registry.npmjs.org/node-scp/-/node-scp-0.0.14.tgz", - "integrity": "sha512-HKia9etcBcC2B659tg+fUkWcAy58ib8MmAcENDvs/R+t4e5w0Hy7Vyey/VJsR01rrgqSTxtqvoab2AQsqp/SXA==", - "dev": true, - "requires": { - "ssh2": "^0.8.9" - } - }, "node-zookeeper-client": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/node-zookeeper-client/-/node-zookeeper-client-0.2.3.tgz", @@ -5638,7 +5896,6 @@ "version": "4.0.1", "resolved": 
"https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "optional": true, "requires": { "path-key": "^3.0.0" }, @@ -5646,8 +5903,7 @@ "path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "optional": true + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" } } }, @@ -6081,7 +6337,6 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "optional": true, "requires": { "color-convert": "^2.0.1" } @@ -6111,7 +6366,6 @@ "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "optional": true, "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -6130,7 +6384,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "optional": true, "requires": { "color-name": "~1.1.4" } @@ -6138,14 +6391,12 @@ "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "optional": true + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "has-flag": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "optional": true + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "log-symbols": { "version": "4.1.0", @@ -6206,7 +6457,6 @@ "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "optional": true, "requires": { "has-flag": "^4.0.0" } @@ -6254,7 +6504,7 @@ "p-locate": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", "dev": true, "requires": { "p-limit": "^1.1.0" @@ -6263,7 +6513,7 @@ "p-try": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", "dev": true }, "package-hash": { @@ -6425,9 +6675,9 @@ "dev": true }, "protobufjs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.2.tgz", - "integrity": "sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==", + "version": "6.11.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.3.tgz", + "integrity": "sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -6445,9 +6695,9 @@ }, "dependencies": { "@types/node": { - "version": "16.7.0", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-16.7.0.tgz", - "integrity": "sha512-e66BrnjWQ3BRBZ2+iA5e85fcH9GLNe4S0n1H0T3OalK2sXg5XWEFTO4xvmGrYQ3edy+q6fdOh5t0/HOY8OAqBg==" + "version": "17.0.39", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.39.tgz", + "integrity": "sha512-JDU3YLlnPK3WDao6/DlXLOgSNpG13ct+CwIO17V8q0/9fWJyeMJJ/VyZ1lv8kDprihvZMydzVwf0tQOqGiY2Nw==" } } }, @@ -6976,6 +7226,12 @@ "has-flag": "^3.0.0" } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "tar": { "version": "4.4.13", "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz", @@ -7188,14 +7444,14 @@ "optional": true }, "tsconfig-paths": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.12.0.tgz", - "integrity": "sha512-e5adrnOYT6zqVnWqZu7i/BQ3BnhzvGbjEjejFXO20lKIKpwTaupkCPgEfv4GZK1IBciJUEhYs3J3p75FdaTFVg==", + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", "dev": true, "requires": { "@types/json5": "^0.0.29", "json5": "^1.0.1", - "minimist": "^1.2.0", + "minimist": "^1.2.6", "strip-bom": "^3.0.0" } }, diff --git a/package.json b/package.json index c84fa94d..d791dc83 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "f5-telemetry", - "version": "1.29.0-1", + "version": "1.30.0-1", "author": "F5 Networks", "license": "Apache-2.0", "repository": { @@ -40,7 +40,7 @@ }, "dependencies": { "@f5devcentral/f5-teem": "^1.5.0", - "@grpc/proto-loader": "^0.6.9", + "@grpc/proto-loader": "^0.6.12", "@opentelemetry/api": "^1.0.4", "@opentelemetry/exporter-metrics-otlp-proto": 
"^0.27.0", "@opentelemetry/sdk-metrics-base": "^0.27.0", @@ -70,19 +70,18 @@ "chai": "^4.3.6", "chai-as-promised": "^7.1.1", "deep-diff": "^1.0.2", - "eslint": "^8.13.0", - "eslint-config-airbnb-base": "^14.2.1", - "eslint-plugin-import": "^2.25.4", + "eslint": "^8.16.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-plugin-import": "^2.26.0", "grpc": "1.24.7", "grpc-mock": "^0.7.0", "icrdk": "git+https://github.com/f5devcentral/f5-icontrollx-dev-kit.git#master", - "memfs": "^3.4.1", + "memfs": "^3.4.4", "mocha": "^7.2.0", "nock": "10.0.0", - "node-scp": "0.0.14", "nyc": "^14.1.1", "object.values": "^1.1.5", - "protobufjs": "^6.11.2", + "protobufjs": "^6.11.3", "proxyquire": "^2.1.3", "sinon": "^7.5.0", "ssh2": "^0.8.9", @@ -126,7 +125,6 @@ "grpc": "This package is used for GRPC connection. Pinned to v1.24.7 as later versions do not compile on Node v6.", "mocha": "This package should use v7.X.Y. CI installs node specific mocha version for node 4 and node 6.", "nock": "This package dropped support for older node versions. Use v10.0.0 for Node v4.", - "node-scp": "This package dropped support for older node versions. Use v0.0.14 for Node v4.", "nyc": "This package dropped support for older node versions. Use v14.X.Y for Node v4.", "sinon": "This package dropped support for older node versions. Use v7.X.Y for Node v4.", "ssh2": "This packaged dropped support for older node versions. Use v0.X.Y for Node >= v5.2 and <= v10.16", diff --git a/scripts/build/publishRpm.sh b/scripts/build/publishRpm.sh deleted file mode 100644 index 8bd34997..00000000 --- a/scripts/build/publishRpm.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -set -e - -NAMESPACE=f5-telemetry-streaming-rpm - -function upload { - # arg[1] - object name - # arg[2] - path to object - url=${ARTIFACTORY_BASE_URL}/${NAMESPACE}/${1} - echo "Uploading ${1} (${2}) to ${url}" - - response=$(curl -H "Authorization: Bearer ${ARTIFACTORY_TOKEN}" -X PUT --data-binary @${2} ${url}) - responseRC=$? 
- - echo $response - if [[ "$responseRC" -eq 0 ]] && [[ "$response" == *created* ]]; then - echo "Upload complete" - else - echo "Upload failed" - exit 1 - fi -} - -cd dist -RPM_FILE=$(ls *.rpm) -RPM_NAME=$(basename "$RPM_FILE") - -SHA_FILE="${RPM_FILE}.sha256.txt" -SHA_NAME=$(basename "$SHA_FILE") - -sha256sum "${RPM_FILE}" > "${SHA_FILE}" -cat "${SHA_FILE}" - -upload "${RPM_NAME}" "${RPM_FILE}" -upload "${SHA_NAME}" "${SHA_FILE}" \ No newline at end of file diff --git a/src/lib/consumers/Azure_Log_Analytics/index.js b/src/lib/consumers/Azure_Log_Analytics/index.js index f2e9bfc8..d8e7fc49 100644 --- a/src/lib/consumers/Azure_Log_Analytics/index.js +++ b/src/lib/consumers/Azure_Log_Analytics/index.js @@ -37,14 +37,41 @@ module.exports = function (context) { .then((keyToUse) => { const promises = []; const tracerMsg = []; - Object.keys(context.event.data).forEach((type) => { - let data = context.event.data[type]; + /* There are several pool types: LTM and several DNS. + Pools and pool members have many-to-many relationship. + So, pool members of every type should have their own table, + even though they are not a top key of the data incoming to the consumer. */ + /* allPoolMembers is the object that will contain pool members of all pool types. + It will be populated while handling the pools + (pool members are sub objects of pools in the incoming data). */ + const allPoolMembers = {}; + const poolMemberMapping = new azureUtil.ClassPoolToMembersMapping(); + poolMemberMapping.buildPoolMemeberHolder(allPoolMembers); + // The pool members will be the handled last, when the pools are already processed. 
+ Object.keys(context.event.data).concat(Object.keys(allPoolMembers)).forEach((type) => { + let data; + if (poolMemberMapping.isPoolMembersType(type)) { + data = allPoolMembers[type]; + if (Object.keys(data).length === 0) { + return; // do not create an empty pool members table + } + } else { + data = context.event.data[type]; + } if (typeof data !== 'object') { data = { value: data }; // make data an object } - if ((format === 'propertyBased') && azureUtil.isConfigItems(data, type)) { + if ((format === 'propertyBased') + && azureUtil.isConfigItems(data, type, poolMemberMapping.isPoolMembersType(type))) { data = azureUtil.transformConfigItems(data); + // If it is a pool, transfer its pool members to the pool members table of the corresponding type. + if (poolMemberMapping.isPoolType(type)) { + data.forEach((pool) => { + azureUtil.splitMembersFromPools(pool, + allPoolMembers[poolMemberMapping.getPoolMembersType(type)]); + }); + } } else { data = [data]; // place in array per API spec } diff --git a/src/lib/consumers/shared/azureUtil.js b/src/lib/consumers/shared/azureUtil.js index 3eafcdc1..dfaeac81 100644 --- a/src/lib/consumers/shared/azureUtil.js +++ b/src/lib/consumers/shared/azureUtil.js @@ -9,7 +9,7 @@ 'use strict'; const crypto = require('crypto'); - +const hasProperty = require('lodash/has'); const promiseUtil = require('../../utils/promise'); const requestsUtil = require('../../utils/requests'); @@ -329,10 +329,18 @@ function getInstrumentationKeys(context) { * * @param {Object} data - data to send to the consumer * @param {String} type - type of the data + * @param {Boolean} isPoolMembersType - true if type is one of pool member types * * @returns {Boolean} true if keys can be dropped */ -function isConfigItems(data, type) { +function isConfigItems(data, type, isPoolMembersType) { + /* Pool members' top keys are artificially generated in splitMembersFromPools + in order to handle a pool member participating in several pools. 
+ Thus, comparison of the top key and the pool member names is useless. */ + if (isPoolMembersType) { + return true; + } + // is it of type sslCerts or keys are of format of format /.../... if (type === 'sslCerts' || Object.keys(data).every((key) => /\/[^/]*\/.*/.test(key))) { // check that the key is the same as property 'name' @@ -354,6 +362,102 @@ function transformConfigItems(data) { return Object.keys(data).map((key) => data[key]); } +class ClassPoolToMembersMapping { + constructor() { + this.poolToMembersMapping = { + pools: 'poolMembers', + aPools: 'aPoolMembers', + aaaaPools: 'aaaaPoolMembers', + cnamePools: 'cnamePoolMembers', + mxPools: 'mxPoolMembers', + naptrPools: 'naptrPoolMembers', + srvPools: 'srvPoolMembers' + }; + } + + /** + * + * Check if "type" is a type of a pool + * + * @param {String} type - type of a data (a table name in Azure Logs) + * + * @returns {Boolean} Returns true iff "type" is a type of a pool + */ + isPoolType(type) { + return hasProperty(this.poolToMembersMapping, type); + } + + /** + * + * Check if "type" is a type of a pool members + * + * @param {String} type - type of a data (a table name in Azure Logs) + * + * @returns {Boolean} Returns true iff "type" is a type of a pool members + */ + isPoolMembersType(type) { + return Object.keys(this.poolToMembersMapping) + .some((poolType) => this.poolToMembersMapping[poolType] === type); + } + + /** + * + * Translate the pool type into the pool members Type + * + * @param {String} poolType - type of a pool + * + * @returns {String} Returns type of a pool members + */ + getPoolMembersType(poolType) { + return this.isPoolType(poolType) + ? this.poolToMembersMapping[poolType] + : null; + } + + /** + * + * Build an object that will complete the data received by the consumer. + * It will eventually contain all the data of pool members. + * The top keys (e.g. "poolMembers") will become table names in Azure Logs. 
+ * + * @param {Object} allPoolMembers - object containing pool member data + */ + buildPoolMemeberHolder(allPoolMembers) { + Object.keys(this.poolToMembersMapping).forEach((poolType) => { + // initially there are no pool members + allPoolMembers[this.poolToMembersMapping[poolType]] = {}; + }); + } +} + +/** + * + * Remove pool members from the pools, and add them to the object of all pool members. + * + * @param {Object} pool - a pool object that also contains its pool members + * @param {Object} poolMembersOfAType - the object that contains all pool members of all the pools of a particular type + */ +function splitMembersFromPools(pool, poolMembersOfAType) { + if (pool.members && typeof pool.members === 'object') { + Object.keys(pool.members).forEach((poolMember) => { + const poolMemberObj = pool.members[poolMember]; + if (typeof poolMemberObj === 'object' && poolMemberObj.poolName) { + /* Create a unique name composed of the pool member name and the pool name + in order to handle a pool member participating in several pools. + This name will be discarded later by transformConfigItems */ + const compositeName = poolMember.concat('-separator-', poolMemberObj.poolName); + poolMembersOfAType[compositeName] = poolMemberObj; + // Pool member name might not be configured + poolMembersOfAType[compositeName].name = poolMember; + delete pool.members[poolMember]; + } + }); + if (Object.keys(pool.members).length === 0) { + delete pool.members; + } + } +} + /** * * Some columns are reserved for Azure Log Analytics. 
@@ -383,5 +487,7 @@ module.exports = { getInstanceMetadata, isConfigItems, transformConfigItems, + ClassPoolToMembersMapping, + splitMembersFromPools, scrubReservedKeys }; diff --git a/src/lib/consumers/shared/metricsUtil.js b/src/lib/consumers/shared/metricsUtil.js index 7aaec12a..25819d01 100644 --- a/src/lib/consumers/shared/metricsUtil.js +++ b/src/lib/consumers/shared/metricsUtil.js @@ -8,8 +8,8 @@ 'use strict'; -const FLOAT_REGEXP_STRICT = /^([+-]?(?:\d+(?:\.\d+)?|\.\d+)(?:[eE][+-]?\d+)?)$/; -const FLOAT_REGEXP = /([+-]?(?:\d+(?:\.\d+)?|\.\d+)(?:[eE][+-]?\d+)?)/; +const metricsUtil = require('../../utils/metrics'); + const ISO_DATE_REGEXP = /^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$/; /** @@ -108,9 +108,9 @@ module.exports = { if (typeof parsedVal === 'string') { // still have to parse data despite on "options.parseMetrics" value // to differentiate between "metric" and "non-metric" data - parsedVal = parseNumberStrict(itemData); + parsedVal = metricsUtil.parseNumberStrict(itemData); if (parsedVal === false && tagsToMetrics.indexOf(itemKey) !== -1) { - parsedVal = parseNumber(itemData); + parsedVal = metricsUtil.parseNumber(itemData); } } if (Number.isFinite(parsedVal)) { @@ -198,41 +198,6 @@ function objectHasKeys(obj) { return false; } -/** - * Parses string to integer or float (fetches first number) - * - * @param {string} val - string to parse - * - * @returns {number | boolean} parsed number or false if unable to parse it - */ -function parseNumber(val) { - val = val.match(FLOAT_REGEXP); - if (val) { - val = parseFloat(val[0]); - if (typeof val === 'number' && Number.isFinite(val)) { - return val; - } - } - return false; -} - -/** - * Parses string to integer or float (only digits and '.' 
allowed) - * - * @param {string} val - string to parse - * - * @returns {number | boolean} parsed number or false if unable to parse it - */ -function parseNumberStrict(val) { - if (FLOAT_REGEXP_STRICT.test(val)) { - val = parseFloat(val); - if (typeof val === 'number' && Number.isFinite(val)) { - return val; - } - } - return false; -} - /** * @callback MetricFoundCb * @param {Array} path - metric path diff --git a/src/lib/systemStats.js b/src/lib/systemStats.js index 445c8f1d..99150374 100644 --- a/src/lib/systemStats.js +++ b/src/lib/systemStats.js @@ -111,17 +111,19 @@ function SystemStats(config) { */ SystemStats.prototype._preprocessEndpoints = function (endpoints) { // Deep copy so endpoint object is not modified in the saved configuration - const processedEndpoints = util.deepCopy(endpoints); - Object.keys(processedEndpoints).forEach((endpoint) => { - if (processedEndpoints[endpoint].protocol === 'snmp') { - processedEndpoints[endpoint].body = { + endpoints = util.deepCopy(endpoints); + Object.keys(endpoints).forEach((endpoint) => { + if (endpoints[endpoint].protocol === 'snmp') { + const additionalOptions = endpoints[endpoint].numericalEnums ? 'e' : ''; + + endpoints[endpoint].body = { command: 'run', - utilCmdArgs: `-c "snmpwalk -L n -O qUs -c public localhost ${endpoints[endpoint].path}"` + utilCmdArgs: `-c "snmpwalk -L n -O ${additionalOptions}QUs -c public localhost ${endpoints[endpoint].path}"` }; - processedEndpoints[endpoint].path = '/mgmt/tm/util/bash'; + endpoints[endpoint].path = '/mgmt/tm/util/bash'; } }); - return processedEndpoints; + return endpoints; }; SystemStats.prototype._getNormalizationOpts = function (property) { diff --git a/src/lib/utils/metrics.js b/src/lib/utils/metrics.js new file mode 100644 index 00000000..34d92a64 --- /dev/null +++ b/src/lib/utils/metrics.js @@ -0,0 +1,49 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. 
Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const FLOAT_REGEXP_STRICT = /^([+-]?(?:\d+(?:\.\d+)?|\.\d+)(?:[eE][+-]?\d+)?)$/; +const FLOAT_REGEXP = /([+-]?(?:\d+(?:\.\d+)?|\.\d+)(?:[eE][+-]?\d+)?)/; + +module.exports = { + /** + * Parses string to integer or float (fetches first number) + * + * @param {string} val - string to parse + * + * @returns {number | boolean} parsed number or false if unable to parse it + */ + parseNumber(val) { + val = val.match(FLOAT_REGEXP); + if (val) { + val = parseFloat(val[0]); + if (typeof val === 'number' && Number.isFinite(val)) { + return val; + } + } + return false; + }, + + /** + * Parses string to integer or float (only digits and '.' allowed) + * + * @param {string} val - string to parse + * + * @returns {number | boolean} parsed number or false if unable to parse it + */ + parseNumberStrict(val) { + if (FLOAT_REGEXP_STRICT.test(val)) { + val = parseFloat(val); + if (typeof val === 'number' && Number.isFinite(val)) { + return val; + } + } + return false; + } +}; diff --git a/src/lib/utils/normalize.js b/src/lib/utils/normalize.js index fda4c072..dd01176c 100644 --- a/src/lib/utils/normalize.js +++ b/src/lib/utils/normalize.js @@ -10,9 +10,9 @@ const toCamelCase = require('lodash/camelCase'); -const logger = require('../logger'); // eslint-disable-line no-unused-vars -const util = require('./misc'); const constants = require('../constants'); +const metricsUtil = require('./metrics'); +const util = require('./misc'); const TRUTHY_REGEXP = /^\s*(true|1|on|yes)\s*$/i; @@ -504,15 +504,24 @@ module.exports = { restructureSNMPEndpoint(args) { const data = (args.data.commandResult || '').trim(); const result = {}; + + /** + * data expected to be like following: + * sysStatMemoryUsed.0 
= 290295264\nsysStatMemoryUsed.1 = 34545\nifAdmin.up = up\n + */ data.split('\n').forEach((row) => { - const rowParts = row.split(' '); - const key = rowParts[0]; - let value = rowParts[1]; - if (typeof key !== 'undefined' && typeof value !== 'undefined') { - if (!Number.isNaN(value)) { - value = Number(value); - } - result[key] = value; + const rowParts = row.split(' = '); + // should be something like ['sysStatMemoryUsed.0', '290295264'] + // ignore if something different + if (rowParts.length === 2) { + const key = rowParts[0]; + const value = rowParts[1]; + + // value is string or number + const parsedVal = metricsUtil.parseNumberStrict(value); + result[key] = Number.isFinite(parsedVal) + ? parsedVal + : value; } }); return result; diff --git a/src/schema/1.30.0/actions_schema.json b/src/schema/1.30.0/actions_schema.json new file mode 100644 index 00000000..c658439b --- /dev/null +++ b/src/schema/1.30.0/actions_schema.json @@ -0,0 +1,187 @@ +{ + "$id": "actions_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Actions schema", + "description": "", + "type": "object", + "definitions": { + "baseActionsChain": { + "title": "Chain of Actions", + "description": "Actions to be performed on the data.", + "type": "array", + "items": { + "$ref": "#/definitions/baseActionObject" + } + }, + "baseActionObject": { + "title": "Base Action object", + "description": "Base object to build actions.", + "type": "object", + "properties": { + "enable": { + "title": "Enable", + "description": "Whether to enable this action in the declaration or not.", + "type": "boolean", + "default": true + } + } + }, + "baseConditionalActionObject": { + "title": "Base Action object with support for conditional statements", + "description": "Base Action object with support for conditional statements.", + "type": "object", + "allOf": [ + { "$ref": "#/definitions/baseActionObject" }, + { + "anyOf": [ + { + "properties": { + "ifAllMatch": { + 
"title": "If All Match", + "description": "The conditions that will be checked against. All must be true.", + "type": "object", + "additionalProperties": true + } + }, + "not": { "required": ["ifAnyMatch"] } + }, + { + "properties": { + "ifAnyMatch": { + "title": "If Any Match", + "description": "An array of ifAllMatch objects. Any individual ifAllMatch object may match, but each condition within an ifAllMatch object must be true", + "type": "array" + } + }, + "not": { "required": ["ifAllMatch"] } + } + ] + } + ] + }, + "subLocation": { + "title": "Location", + "description": "Used to specify a location in TS data. Use boolean type with value true to specify the location.", + "oneOf": [ + { + "type": "boolean", + "const": true + }, + { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/subLocation" + } + } + ] + }, + "locations": { + "title": "Location", + "description": "The location(s) to apply the action.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/subLocation" + } + }, + "setTagAction": { + "title": "setTag Action", + "description": "Action to assign a tag(s) to particular or default location", + "type": "object", + "allOf": [ + { "$ref": "#/definitions/baseConditionalActionObject" }, + { + "properties": { + "setTag": { + "title": "Set Tag", + "description": "The tag values to be added.", + "type": "object", + "additionalProperties": true + }, + "locations": { + "title": "Location", + "description": "The location(s) to apply the action.", + "allOf": [{ "$ref": "#/definitions/locations" }] + }, + "enable": {}, + "ifAllMatch": {}, + "ifAnyMatch": {} + }, + "additionalProperties": false, + "required": ["setTag"] + } + ] + }, + "includeDataAction": { + "title": "includeData Action", + "description": "Action to specify data fields to include in the output", + "type": "object", + "allOf": [ + { "$ref": "#/definitions/baseConditionalActionObject" }, + { + "properties": { + "includeData": { + "title": "Include Data", 
+ "description": "The data fields to include in the output", + "type": "object", + "additionalProperties": false + }, + "locations": { + "title": "Location", + "description": "The location(s) to apply the action.", + "allOf": [{ "$ref": "#/definitions/locations" }] + }, + "enable": {}, + "ifAllMatch": {}, + "ifAnyMatch": {} + }, + "additionalProperties": false, + "required": ["includeData", "locations"] + } + ] + }, + "excludeDataAction": { + "title": "excludeData Action", + "description": "Action to specify data fields to exclude form the output", + "type": "object", + "allOf": [ + { "$ref": "#/definitions/baseConditionalActionObject" }, + { + "properties": { + "excludeData": { + "title": "Exclude Data", + "description": "The data fields to exclude from the output", + "type": "object", + "additionalProperties": false + }, + "locations": { + "title": "Location", + "description": "The location(s) to apply the action.", + "allOf": [{ "$ref": "#/definitions/locations" }] + }, + "enable": {}, + "ifAllMatch": {}, + "ifAnyMatch": {} + }, + "additionalProperties": false, + "required": ["excludeData", "locations"] + } + ] + }, + "inputDataStreamActionsChain": { + "title": "", + "description": "", + "allOf": [ + { "$ref": "#/definitions/baseActionsChain" }, + { + "items": { + "oneOf": [ + { "$ref": "#/definitions/excludeDataAction" }, + { "$ref": "#/definitions/includeDataAction" }, + { "$ref": "#/definitions/setTagAction" } + ] + } + } + ] + } + } +} \ No newline at end of file diff --git a/src/schema/1.30.0/base_schema.json b/src/schema/1.30.0/base_schema.json new file mode 100644 index 00000000..2b1f9859 --- /dev/null +++ b/src/schema/1.30.0/base_schema.json @@ -0,0 +1,310 @@ +{ + "$id": "base_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming", + "description": "", + "type": "object", + "definitions": { + "enable": { + "title": "Enable", + "description": "This property can be used to enable/disable the poller/listener" 
, + "type": "boolean" + }, + "trace": { + "title": "Trace", + "description": "Enables data dumping to file. Boolean uses pre-defined file location, however value could be a string which contains path to a specific file instead" , + "minLength": 1, + "type": ["boolean", "string"] + }, + "traceConfig": { + "title": "Trace (v2)", + "description": "Enables data dumping to file. Boolean uses pre-defined file location, however value could be a string which contains path to a specific file instead", + "type": "object", + "properties": { + "type": { + "title": "Trace type", + "description": "Trace type - output data or input data", + "type": "string", + "enum": ["output", "input"] + }, + "path": { + "title": "Path to trace file", + "description": "Path to trace file to write data to", + "type": "string", + "minLength": 1 + } + }, + "required": ["type"] + }, + "traceV2": { + "title": "Trace (v2)", + "description": "Enables data dumping to file. Boolean uses pre-defined file location, however value could be a string which contains path to a specific file instead", + "oneOf": [ + { "$ref": "#/definitions/traceConfig" }, + { + "type": "array", + "minItems": 1, + "maxItems": 2, + "uniqueItemProperties": ["type"], + "items": { + "allOf": [{ + "$ref": "#/definitions/traceConfig" + }] + } + } + ] + }, + "secret": { + "title": "Passphrase (secret)", + "description": "" , + "type": "object", + "properties": { + "class": { + "title": "Class", + "description": "Telemetry streaming secret class", + "type": "string", + "enum": [ "Secret" ], + "default": "Secret" + }, + "cipherText": { + "title": "Cipher Text: this contains a secret to encrypt", + "type": "string" + }, + "environmentVar": { + "title": "Environment Variable: this contains the named env var where the secret resides", + "type": "string", + "minLength": 1 + }, + "protected": { + "$comment": "Meta property primarily used to determine if 'cipherText' needs to be encrypted", + "title": "Protected", + "type": "string", + "enum": 
[ "plainText", "plainBase64", "SecureVault" ], + "default": "plainText" + } + }, + "oneOf": [ + { "required": [ "cipherText" ] }, + { "required": [ "environmentVar" ] } + ], + "f5secret": true + }, + "username": { + "$comment": "Common field for username to use everywhere in scheme", + "title": "Username", + "type": "string", + "minLength": 1 + }, + "stringOrSecret": { + "allOf": [ + { + "if": { "type": "string" }, + "then": {}, + "else": {} + }, + { + "if": { "type": "object" }, + "then": { "$ref": "base_schema.json#/definitions/secret" }, + "else": {} + } + ] + }, + "constants": { + "title": "Constants", + "description": "" , + "type": "object", + "properties": { + "class": { + "title": "Class", + "description": "Telemetry streaming constants class", + "type": "string", + "enum": [ "Constants" ] + } + }, + "additionalProperties": true + }, + "tag": { + "$comment": "Defaults do not get applied for $ref objects, so place defaults alongside instead.", + "title": "Tag", + "description": "" , + "type": "object", + "properties": { + "tenant": { + "title": "Tenant tag", + "type": "string", + "minLength": 1 + }, + "application": { + "title": "Application tag", + "type": "string", + "minLength": 1 + } + }, + "additionalProperties": true + }, + "match": { + "$comment": "Defaults do not get applied for $ref objects, so place defaults alongside instead.", + "title": "Pattern to filter data", + "description": "", + "type": "string" + }, + "enableHostConnectivityCheck": { + "$comment": "This property can be used to enable/disable the host connectivity check in configurations where this is in effect", + "title": "Host", + "description": "" , + "type": "boolean" + }, + "allowSelfSignedCert": { + "$comment": "This property can be used by consumers, system pollers to enable/disable SSL Cert check", + "title": "Allow Self-Signed Certificate", + "description": "" , + "type": "boolean" + }, + "host": { + "$comment": "This property can be used by consumers, system pollers", + "title": 
"Host", + "description": "" , + "type": "string", + "minLength": 1, + "anyOf": [ + { "format": "ipv4" }, + { "format": "ipv6" }, + { "format": "hostname" } + ], + "hostConnectivityCheck": true + }, + "port": { + "title": "Port", + "description": "" , + "type": "integer", + "minimum": 0, + "maximum": 65535 + }, + "protocol": { + "title": "Protocol", + "description": "" , + "type": "string", + "enum": [ "http", "https" ] + }, + "proxy": { + "title": "Proxy Configuration", + "description": "", + "type": "object", + "dependencies": { + "passphrase": [ "username" ] + }, + "required": [ "host" ], + "properties": { + "host": { + "$ref": "#/definitions/host" + }, + "port": { + "default": 80, + "allOf": [ + { + "$ref": "#/definitions/port" + } + ] + }, + "protocol": { + "default": "http", + "allOf": [ + { + "$ref": "#/definitions/protocol" + } + ] + }, + "enableHostConnectivityCheck": { + "$ref": "#/definitions/enableHostConnectivityCheck" + }, + "allowSelfSignedCert": { + "$ref": "#/definitions/allowSelfSignedCert" + }, + "username": { + "$ref": "#/definitions/username" + }, + "passphrase": { + "$ref": "#/definitions/secret" + } + }, + "additionalProperties": false + } + }, + "properties": { + "class": { + "title": "Class", + "description": "Telemetry streaming top level class", + "type": "string", + "enum": [ "Telemetry" ] + }, + "schemaVersion": { + "title": "Schema version", + "description": "Version of ADC Declaration schema this declaration uses", + "type": "string", + "$comment": "IMPORTANT: In enum array, please put current schema version first, oldest-supported version last. 
Keep enum array sorted most-recent-first.", + "enum": [ "1.30.0", "1.29.0", "1.28.0", "1.27.1", "1.27.0", "1.26.0", "1.25.0", "1.24.0", "1.23.0", "1.22.0", "1.21.0", "1.20.1", "1.20.0", "1.19.0", "1.18.0", "1.17.0", "1.16.0", "1.15.0", "1.14.0", "1.13.0", "1.12.0", "1.11.0", "1.10.0", "1.9.0", "1.8.0", "1.7.0", "1.6.0", "1.5.0", "1.4.0", "1.3.0", "1.2.0", "1.1.0", "1.0.0", "0.9.0" ], + "default": "1.30.0" + }, + "$schema": { + "title": "Schema", + "description": "", + "type": "string" + } + }, + "additionalProperties": { + "$comment": "AJV does not resolve defaults inside oneOf/anyOf, so instead use allOf. Any schema refs should also use allOf with an if/then/else on class", + "properties": { + "class": { + "title": "Class", + "type": "string", + "enum": [ + "Telemetry_System", + "Telemetry_System_Poller", + "Telemetry_Listener", + "Telemetry_Consumer", + "Telemetry_Pull_Consumer", + "Telemetry_iHealth_Poller", + "Telemetry_Endpoints", + "Telemetry_Namespace", + "Controls", + "Shared" + ] + } + }, + "allOf": [ + { + "$ref": "system_schema.json#" + }, + { + "$ref": "system_poller_schema.json#" + }, + { + "$ref": "listener_schema.json#" + }, + { + "$ref": "consumer_schema.json#" + }, + { + "$ref": "pull_consumer_schema.json#" + }, + { + "$ref": "ihealth_poller_schema.json#" + }, + { + "$ref": "endpoints_schema.json#" + }, + { + "$ref": "controls_schema.json#" + }, + { + "$ref": "shared_schema.json#" + }, + { + "$ref": "namespace_schema.json#" + } + ] + }, + "required": [ + "class" + ] +} diff --git a/src/schema/1.30.0/consumer_schema.json b/src/schema/1.30.0/consumer_schema.json new file mode 100644 index 00000000..06cc47cf --- /dev/null +++ b/src/schema/1.30.0/consumer_schema.json @@ -0,0 +1,1386 @@ +{ + "$id": "consumer_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Consumer schema", + "description": "", + "type": "object", + "definitions": { + "jmesPathAction": { + "title": "JMESPath Action", + "description": 
"Will use a JMESPath expression to modify the incoming data payload", + "type": "object", + "allOf": [ + { "$ref": "actions_schema.json#/definitions/baseActionObject" }, + { + "properties": { + "JMESPath": { + "title": "JMESPath", + "description": "Will use a JMESPath expression to modify the incoming data payload", + "type": "object", + "additionalProperties": false + }, + "expression": { + "title": "Expression", + "description": "The JMESPath expression to be applied to the incoming data payload", + "type": "string", + "minLength": 1 + }, + "enable": {} + }, + "additionalProperties": false, + "required": ["JMESPath", "expression"] + } + ] + }, + "autoTaggingStatsd": { + "title": "Statsd auto tagging", + "description": "Will parse incoming payload for values to automatically add as tags.", + "type": "object", + "properties": { + "method": { + "title": "AutoTagging method", + "description": "AutoTagging method to use to fetch tags", + "type": "string", + "enum": ["sibling"] + } + }, + "additionalProperties": false, + "required": ["method"] + }, + "genericHttpActions": { + "title": "Actions", + "description": "Actions to be performed on the Generic HTTP Consumer.", + "allOf": [ + { "$ref": "actions_schema.json#/definitions/baseActionsChain" }, + { + "items": { + "oneOf": [ + { "$ref": "#/definitions/jmesPathAction" } + ] + } + } + ] + }, + "host": { + "$comment": "Required for certain consumers: standard property", + "title": "Host", + "description": "FQDN or IP address" , + "type": "string", + "minLength": 1, + "anyOf": [ + { "format": "ipv4" }, + { "format": "ipv6" }, + { "format": "hostname" } + ], + "hostConnectivityCheck": true + }, + "protocols": { + "$comment": "Required for certain consumers: standard property", + "title": "Protocols (all)", + "description": "" , + "type": "string", + "enum": [ "https", "http", "tcp", "udp", "binaryTcpTls", "binaryTcp" ] + }, + "port": { + "$comment": "Required for certain consumers: standard property", + "title": "Port", + 
"description": "" , + "type": "integer", + "minimum": 0, + "maximum": 65535 + }, + "path": { + "$comment": "Required for certain consumers: standard property", + "title": "Path", + "description": "Path to post data to", + "type": ["string", "object"], + "minLength": 1, + "f5expand": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/stringOrSecret" + } + ] + }, + "method": { + "$comment": "Required for certain consumers: standard property", + "title": "Method", + "description": "HTTP method to use (limited to sensical choices)" , + "type": "string", + "enum": [ "POST", "GET", "PUT" ] + }, + "headers": { + "$comment": "Required for certain consumers: standard property", + "title": "Headers", + "description": "HTTP headers to use" , + "type": "array", + "items": { + "properties": { + "name": { + "description": "Name of this header", + "type": "string", + "f5expand": true, + "minLength": 1 + }, + "value": { + "description": "Value of this header", + "type": ["string", "object"], + "f5expand": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/stringOrSecret" + } + ] + } + }, + "required": [ + "name", + "value" + ], + "additionalProperties": false + } + }, + "customOpts": { + "$comment": "Required for certain consumers: standard property", + "title": "Custom Opts (Client Library Dependent)", + "description": "Additional options for use by consumer client library. Refer to corresponding consumer lib documentation for acceptable keys and values." 
, + "type": "array", + "items": { + "properties": { + "name": { + "description": "Name of the option", + "type": "string", + "f5expand": true, + "minLength": 1 + }, + "value": { + "description": "Value of the option", + "minLength": 1, + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "allOf": [ + { + "f5expand": true + }, + { + "$ref": "base_schema.json#/definitions/stringOrSecret" + } + ] + } + ] + } + }, + "required": [ + "name", + "value" + ], + "additionalProperties": false + }, + "minItems": 1 + }, + "format": { + "$comment": "Required for certain consumers: Splunk and Azure_Log_Analytics", + "title": "Format (informs consumer additional formatting may be required)", + "description": "", + "type": "string" + }, + "username": { + "$comment": "Required for certain consumers: standard property", + "title": "Username", + "description": "" , + "minLength": 1, + "type": "string", + "f5expand": true + }, + "region": { + "$comment": "Required for certain consumers: AWS_CloudWatch, AWS_S3, Azure_Log_Analytics, Azure_App_Insights, DataDog", + "title": "Region", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "endpointUrl": { + "$comment": "Required for certain consumers: AWS_CloudWatch, AWS_S3", + "title": "endpoint url", + "description": "The full endpoint URL for service requests", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "bucket": { + "$comment": "Required for certain consumers: AWS_S3", + "title": "Bucket", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "maxAwsLogBatchSize": { + "$comment": "Required for certain consumers: AWS_CloudWatch", + "title": "Maximum Batch Size", + "description": "The maximum number of telemetry items to include in a payload to the ingestion endpoint", + "type": "integer", + "minimum": 1, + "default": 100, + "maximum": 10000 + }, + "logGroup": { + "$comment": "Required for certain consumers: AWS_CloudWatch", + 
"title": "Log Group", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "logStream": { + "$comment": "Required for certain consumers: AWS_CloudWatch", + "title": "Log Stream", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "metricNamespace": { + "$comment": "Required for certain consumers: AWS_CloudWatch", + "title": "Metric Namespace", + "description": "The namespace for the metrics", + "type": "string", + "f5expand": true, + "minLength": 1 + }, + "metricPrefix": { + "$comment": "Required for certain consumers: DataDog", + "title": "Metric Prefix", + "description": "The string value(s) to use as a metric prefix", + "type": "array", + "minItems": 1, + "items": { + "allOf": [{ + "type": "string", + "f5expand": true, + "minLength": 1 + }] + } + }, + "workspaceId": { + "$comment": "Required for certain consumers: Azure_Log_Analytics", + "title": "Workspace ID", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "useManagedIdentity": { + "$comment": "Required for certain consumers: Azure_Log_Analytics and Azure_Application_Insights", + "title": "Use Managed Identity", + "description": "Determines whether to use Managed Identity to perform authorization for Azure services", + "type": "boolean", + "default": false + }, + "appInsightsResourceName": { + "$comment": "Required for certain consumers: Azure_Application_Insights", + "title": "Application Insights Resource Name (Pattern)", + "description": "Name filter used to determine which App Insights resource to send metrics to. 
If not provided, TS will send metrics to App Insights in the subscription in which the managed identity has permissions to", + "type": "string", + "minLength": 1 + }, + "instrumentationKey": { + "$comment": "Required for certain consumers: Azure_Application_Insights", + "title": "Instrumentation Key", + "description": "Used to determine which App Insights resource to send metrics to", + "anyOf": [ + { + "type": "string", + "f5expand": true, + "minLength": 1 + }, + { + "type":"array", + "items": { + "type": "string", + "f5expand": true, + "minLength": 1 + }, + "minItems": 1 + } + ] + }, + "maxBatchIntervalMs": { + "$comment": "Required for certain consumers: Azure_Application_Insights", + "title": "Maximum Batch Interval (ms)", + "description": "The maximum amount of time to wait in milliseconds to for payload to reach maxBatchSize", + "type": "integer", + "minimum": 1000, + "default": 5000 + }, + "maxBatchSize": { + "$comment": "Required for certain consumers: Azure_Application_Insights", + "title": "Maximum Batch Size", + "description": "The maximum number of telemetry items to include in a payload to the ingestion endpoint", + "type": "integer", + "minimum": 1, + "default": 250 + }, + "topic": { + "$comment": "Required for certain consumers: Kafka", + "title": "Topic", + "description": "" , + "type": "string", + "f5expand": true + }, + "index": { + "$comment": "Required for certain consumers: ElasticSearch", + "title": "Index Name", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "apiVersion": { + "$comment": "Required for certain consumers: ElasticSearch", + "title": "API Version", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "dataType": { + "$comment": "Required for certain consumers: AWS_CloudWatch, ElasticSearch", + "title": "Data type", + "description": "" , + "type": "string", + "f5expand": true + }, + "authenticationProtocol": { + "$comment": "Required for certain consumers: 
Kafka", + "title": "Authentication Protocol", + "description": "" , + "type": "string", + "f5expand": true, + "enum": [ + "SASL-PLAIN", + "TLS", + "None" + ] + }, + "clientCertificate": { + "$comment": "Required for certain consumers: Kafka, Generic HTTP", + "title": "Client Certificate", + "description": "Certificate(s) to use when connecting to a secured endpoint.", + "type": "object", + "f5expand": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/secret" + } + ] + }, + "rootCertificate": { + "$comment": "Required for certain consumers: Kafka, Generic HTTP", + "title": "Root Certificate", + "description": "Certificate Authority root certificate, used to validate certificate chains.", + "type": "object", + "f5expand": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/secret" + } + ] + }, + "outputMode": { + "$comment": "Required for certain consumers: Generic HTTP", + "title": "output raw data flag", + "description": "Flag to request output of the raw data.", + "type": "string", + "enum": [ "processed", "raw" ] + }, + "projectId": { + "$comment": "Required for certain consumers: Google_Cloud_Monitoring", + "title": "Project ID", + "description": "The ID of the relevant project.", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "serviceEmail": { + "$comment": "Required for certain consumers: Google_Cloud_Monitoring, Google_Cloud_Logging", + "title": "Service Email", + "description": "The service email.", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "privateKeyId": { + "$comment": "Required for certain consumers when Service Account Token is not used: Google_Cloud_Monitoring, Google_Cloud_Logging", + "title": "Private Key ID", + "description": "The private key ID.", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "useServiceAccountToken": { + "$comment": "Used by certain consumers: Google_Cloud_Monitoring, Google_Cloud_Logging", + "title": "Use Service Account Token", + "description": 
"Determines whether to use Service Account Token to perform authorization for Google services", + "type": "boolean", + "default": false + }, + "logScope": { + "$comment": "Required for certain consumers: Google_Cloud_Logging", + "title": "Logging Scope Type", + "description": "" , + "enum": ["projects", "organizations", "billingAccounts", "folders"], + "f5expand": true + }, + "logScopeId": { + "$comment": "Required for certain consumers: Google_Cloud_Logging", + "title": "Logging Scope ID", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "logId": { + "$comment": "Required for certain consumers: Google_Cloud_Logging", + "title": "Logging ID", + "description": "" , + "type": "string", + "format": "regex", + "pattern": "^[a-zA-z0-9._-]+$", + "minLength": 1, + "f5expand": true + }, + "privateKey": { + "$comment": "Required for certain consumers: Kafka, Generic HTTP", + "title": "Private Key", + "description": "Private Key", + "type": "object", + "f5expand": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/secret" + } + ] + }, + "eventSchemaVersion": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Event Schema Version", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true, + "default": "1" + }, + "f5csTenantId": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "F5CS Tenant ID", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "f5csSensorId": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "F5CS Sensor ID", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "payloadSchemaNid": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Namespace ID for payloadSchema", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "serviceAccount": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": 
"Service Account", + "description": "Service Account to authentication" , + "type": "object", + "properties": { + "authType": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "SA Type", + "description": "" , + "type": "string", + "enum": ["google-auth" ] + }, + "type": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "SA Type", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "projectId": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Project Id", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "privateKeyId": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Private Key Id", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "privateKey": { + "$ref": "base_schema.json#/definitions/secret" + }, + "clientEmail": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Client Email", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "clientId": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Client Id", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "authUri": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Auth Uri", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "tokenUri": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Token Uri", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "authProviderX509CertUrl": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Auth Provider X509 Cert Url", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "clientX509CertUrl": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Client X509 Cert Url", + "description": "" , + 
"type": "string", + "minLength": 1, + "f5expand": true + } + }, + "additionalProperties": false, + "allOf": [ + { + "if": { "properties": { "authType": { "const": "google-auth" } } }, + "then": { + "required": [ + "type", + "projectId", + "privateKeyId", + "privateKey", + "clientEmail", + "clientId", + "authUri", + "tokenUri", + "authProviderX509CertUrl", + "clientX509CertUrl" + ] + }, + "else": {} + }] + }, + "targetAudience": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "Target Audience", + "description": "" , + "type": "string", + "minLength": 1, + "f5expand": true + }, + "useSSL": { + "$comment": "Required for certain consumers: F5_Cloud", + "title": "useSSL", + "description": "To decide if GRPC connection should use SSL and then it is secured" , + "type": "boolean", + "f5expand": true + }, + "compressionType": { + "$comment": "Required for certain consumers: DataDog, Splunk", + "title": "Data compression", + "description": "Whether or not to compress data and what compression to use before sending it to destination", + "type": "string", + "enum": ["none", "gzip"] + }, + "reportInstanceMetadata": { + "$comment": "Required for certain consumers: Google_Cloud_Monitoring, Google_Cloud_Logging", + "title": "Instance metadata reporting", + "description": "Enables instance metadata collection and reporting" , + "type": "boolean", + "f5expand": true + }, + "apiKey": { + "$comment": "Required for certain consumers: DataDog", + "title": "API key to use to push data", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "service": { + "$comment": "Required for certain consumers: DataDog", + "title": "The name of the service generating telemetry data", + "type": "string", + "minLength": 1, + "f5expand": true + }, + "convertBooleansToMetrics": { + "$comment": "Required for certain consumers: DataDog, Statsd, OpenTelemetry_Exporter", + "title": "Convert boolean values to metrics", + "description": "Whether or not to convert boolean 
values to metrics. True becomes 1, False becomes 0" , + "type": "boolean", + "f5expand": true, + "default": false + }, + "customTags": { + "$comment": "Required for certain consumers: DataDog", + "title": "Custom tags", + "description": "A collection of custom tags that are appended to the dynamically generated telemetry tags", + "type": "array", + "minItems": 1, + "items": { + "properties": { + "name": { + "description": "Name of this tag", + "type": "string", + "f5expand": true, + "minLength": 1 + }, + "value": { + "description": "Value of this tag", + "type": "string", + "f5expand": true, + "minLength": 1 + } + }, + "additionalProperties": false + } + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_Consumer" } } }, + "then": { + "required": [ + "class", + "type" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Consumer class", + "type": "string", + "enum": [ "Telemetry_Consumer" ] + }, + "enable": { + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "trace": { + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/trace" + } + ] + }, + "type": { + "title": "Type", + "description": "" , + "type": "string", + "enum": [ + "AWS_CloudWatch", + "AWS_S3", + "Azure_Log_Analytics", + "Azure_Application_Insights", + "DataDog", + "default", + "ElasticSearch", + "Generic_HTTP", + "Google_Cloud_Logging", + "Google_Cloud_Monitoring", + "Google_StackDriver", + "Graphite", + "Kafka", + "OpenTelemetry_Exporter", + "Splunk", + "Statsd", + "Sumo_Logic", + "F5_Cloud" + ] + }, + "enableHostConnectivityCheck": { + "$ref": "base_schema.json#/definitions/enableHostConnectivityCheck" + }, + "allowSelfSignedCert": { + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/allowSelfSignedCert" + } + ] + } + }, + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - 
could reuse above properties but prefer a separate block", + "properties": { + "addTags": {}, + "actions": {}, + "apiKey": {}, + "class": {}, + "customTags": {}, + "enable": {}, + "trace": {}, + "type": {}, + "enableHostConnectivityCheck": {}, + "allowSelfSignedCert": {}, + "host": {}, + "protocol": {}, + "port": {}, + "path": {}, + "method": {}, + "headers": {}, + "customOpts": {}, + "username": {}, + "passphrase": {}, + "format": {}, + "workspaceId": {}, + "useManagedIdentity": {}, + "instrumentationKey": {}, + "appInsightsResourceName": {}, + "maxBatchIntervalMs": {}, + "maxBatchSize": {}, + "region": {}, + "endpointUrl": {}, + "maxAwsLogBatchSize": {}, + "logGroup": {}, + "logStream": {}, + "metricNamespace": {}, + "metricPrefix": {}, + "bucket": {}, + "topic": {}, + "apiVersion": {}, + "index": {}, + "dataType": {}, + "authenticationProtocol": {}, + "projectId": {}, + "serviceEmail": {}, + "privateKey": {}, + "privateKeyId": {}, + "useServiceAccountToken": {}, + "clientCertificate": {}, + "rootCertificate": {}, + "outputMode": {}, + "fallbackHosts": {}, + "eventSchemaVersion": {}, + "f5csTenantId": {}, + "f5csSensorId": {}, + "payloadSchemaNid": {}, + "serviceAccount": {}, + "targetAudience": {}, + "useSSL": {}, + "proxy": {}, + "compressionType": {}, + "logScope": {}, + "logScopeId": {}, + "logId": {}, + "reportInstanceMetadata": {}, + "metricsPath": {}, + "service": {}, + "convertBooleansToMetrics": {} + }, + "additionalProperties": false, + "dependencies": { + "actions": { + "allOf": [ + { + "properties": { "type": { "const": "Generic_HTTP" } } + } + ] + } + } + }, + { + "if": { "properties": { "type": { "const": "default" } } }, + "then": { + "required": [], + "properties": {} + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Generic_HTTP" } } }, + "then": { + "required": [ + "host" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "fallbackHosts": { + "type": "array", + "description": "List FQDNs or IP addresses 
to be used as fallback hosts" , + "minItems": 1, + "items": { + "allOf": [{ + "$ref": "#/definitions/host" + }] + } + }, + "protocol": { "$ref": "#/definitions/protocols", "default": "https" }, + "port": { "$ref": "#/definitions/port", "default": 443 }, + "path": { "$ref": "#/definitions/path", "default": "/" }, + "method": { "$ref": "#/definitions/method", "default": "POST" }, + "headers": { "$ref": "#/definitions/headers" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "proxy": { "$ref": "base_schema.json#/definitions/proxy" }, + "privateKey": { "$ref": "#/definitions/privateKey" }, + "clientCertificate": { "$ref": "#/definitions/clientCertificate" }, + "rootCertificate": { "$ref": "#/definitions/rootCertificate" }, + "outputMode": { "$ref": "#/definitions/outputMode", "default": "processed" }, + "actions": { "$ref": "#/definitions/genericHttpActions" } + }, + "allOf": [ + { + "if": { "required": [ "clientCertificate" ] }, + "then": { "required": [ "privateKey" ] } + }, + { + "if": { "required": [ "privateKey" ] }, + "then": { "required": [ "clientCertificate" ] } + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Splunk" } } }, + "then": { + "required": [ + "host", + "passphrase" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "protocol": { "$ref": "#/definitions/protocols", "default": "https" }, + "port": { "$ref": "#/definitions/port", "default": 8088 }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "format": { "$ref": "#/definitions/format", "enum": [ "default", "legacy", "multiMetric" ], "default": "default" }, + "proxy": { "$ref": "base_schema.json#/definitions/proxy" }, + "compressionType": { "$ref": "#/definitions/compressionType", "default": "gzip" } + } + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Azure_Log_Analytics" } } }, + "then": { + "required": [ + "workspaceId" + ], + "properties": { + "workspaceId": { "$ref": 
"#/definitions/workspaceId" }, + "format": { "$ref": "#/definitions/format", "enum": [ "default", "propertyBased" ], "default": "default" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "useManagedIdentity": { "$ref": "#/definitions/useManagedIdentity", "default": false }, + "region": { "$ref": "#/definitions/region" } + }, + "allOf": [ + { + "dependencies": { + "passphrase": { + "anyOf": [ + { "not": {"required": [ "useManagedIdentity" ] } }, + { "properties": { "useManagedIdentity": { "const": false } } } + ] + } + } + }, + { + "if": { "not": { "required" : [ "useManagedIdentity"] } }, + "then": { "required": ["passphrase"] }, + "else": { + "if": { "properties": { "useManagedIdentity": { "const": true } } }, + "then": { "not": { "required": ["passphrase"] } }, + "else": { "required": ["passphrase"]} + } + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Azure_Application_Insights" } } }, + "then": { + "properties": { + "instrumentationKey": { "$ref": "#/definitions/instrumentationKey" }, + "maxBatchSize": { "$ref": "#/definitions/maxBatchSize", "default": 250 }, + "maxBatchIntervalMs": { "$ref": "#/definitions/maxBatchIntervalMs", "default": 5000 }, + "customOpts": { "$ref": "#/definitions/customOpts" }, + "useManagedIdentity": { "$ref": "#/definitions/useManagedIdentity", "default": false }, + "appInsightsResourceName": { "$ref": "#/definitions/appInsightsResourceName" }, + "region": { "$ref": "#/definitions/region" } + }, + "allOf": [ + { + "dependencies": { + "instrumentationKey": { + "allOf": [ + { + "anyOf": [ + { "not": { "required": [ "useManagedIdentity" ] } }, + { "properties": { "useManagedIdentity": { "const": false } } } + ] + }, + { + "not": { "required": ["appInsightsResourceName"] } + } + ] + } + } + }, + { + "if": { "not": { "required" : [ "useManagedIdentity"] } }, + "then": { "required": ["instrumentationKey"] }, + "else": { + "if": { "properties": { "useManagedIdentity": { "const": true } } 
}, + "then": { "not": { "required": ["instrumentationKey"] } }, + "else": { + "allOf": [ + { "required": [ "instrumentationKey" ]}, + { "not": { "required": [ "appInsightsResourceName" ] } } + ] + } + } + }, + { + "if": { "required": [ "appInsightsResourceName" ] }, + "then": { "properties": { "appInsightsResourceName": { "minLength": 1 } }} + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "AWS_CloudWatch" } } }, + "then": { + "required": [ + "region", + "dataType" + ], + "properties": { + "region": { "$ref": "#/definitions/region" }, + "dataType": { "$ref": "#/definitions/dataType", "default": "logs" }, + "username": { "$ref": "#/definitions/username" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "endpointUrl": { "$ref": "#/definitions/endpointUrl" } + }, + "allOf": [ + { "not": { "required": ["username"], "not": { "required": ["passphrase"] }}}, + { "not": { "required": ["passphrase"], "not": { "required": ["username"] }}}, + { + "if": { "properties": { "dataType": { "enum": ["logs", null] } } }, + "then": { + "properties": { + "maxAwsLogBatchSize": { "$ref": "#/definitions/maxAwsLogBatchSize", "default": 100 } + }, + "required": ["maxAwsLogBatchSize"] + } + }, + { "oneOf": + [ + { + "allOf": [ + { + "properties": { + "logGroup": { "$ref": "#/definitions/logGroup" }, + "logStream": { "$ref": "#/definitions/logStream" }, + "dataType": { + "allOf": + [ + { "$ref": "#/definitions/dataType"}, + { "enum": ["logs", null] } + ] + } + } + }, + { "required":[ "logGroup", "logStream" ] }, + { "not": { "required": ["metricNamespace"] }} + ] + }, + { + "allOf": [ + { + "properties": { + "metricNamespace": { "$ref": "#/definitions/metricNamespace" }, + "dataType": { + "allOf": [ + { "$ref": "#/definitions/dataType"}, + { "enum": ["metrics"] } + ] + } + } + }, + { "required":[ "metricNamespace" ] }, + { "not": { "required":[ "maxAwsLogBatchSize" ] }}, + { "not": { "required":[ "logStream" ] }}, + { "not": { "required":[ 
"logGroup" ] }} + ] + } + ] + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "AWS_S3" } } }, + "then": { + "required": [ + "region", + "bucket" + ], + "properties": { + "region": { "$ref": "#/definitions/region" }, + "bucket": { "$ref": "#/definitions/bucket" }, + "username": { "$ref": "#/definitions/username" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "endpointUrl": { "$ref": "#/definitions/endpointUrl" } + }, + "dependencies": { + "passphrase": [ "username" ], + "username":[ "passphrase" ] + } + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Graphite" } } }, + "then": { + "required": [ + "host" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "protocol": { "$ref": "#/definitions/protocols", "default": "https" }, + "port": { "$ref": "#/definitions/port", "default": 443 }, + "path": { "$ref": "#/definitions/path", "default": "/events/" } + } + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Kafka" } } }, + "then": { + "required": [ + "host", + "topic" + ], + "properties": { + "authenticationProtocol": { "$ref": "#/definitions/authenticationProtocol", "default": "None" }, + "host": { "$ref": "#/definitions/host" }, + "protocol": { "$ref": "#/definitions/protocols", "default": "binaryTcpTls" }, + "port": { "$ref": "#/definitions/port", "default": 9092 }, + "topic": { "$ref": "#/definitions/topic" } + }, + "allOf": [ + { + "if": { "properties": { "authenticationProtocol": { "const": "SASL-PLAIN" } } }, + "then": { + "required": [ + "username" + ], + "properties": { + "username": { "$ref": "#/definitions/username" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" } + }, + "dependencies": { + "passphrase": [ "username" ] + } + }, + "else": {} + }, + { + "if": { "properties": { "authenticationProtocol": { "const": "TLS" } } }, + "then": { + "required": [ + "privateKey", + "clientCertificate" + ], + "allOf": [ + { "not": { 
"required": [ "username" ] } }, + { "not": { "required": [ "passphrase" ] } } + ], + "properties": { + "privateKey": { "$ref": "#/definitions/privateKey" }, + "clientCertificate": { "$ref": "#/definitions/clientCertificate" }, + "rootCertificate": { "$ref": "#/definitions/rootCertificate" }, + "protocol": { "const": "binaryTcpTls" } + } + }, + "else": {} + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "ElasticSearch" } } }, + "then": { + "required": [ + "host", + "index" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "protocol": { "$ref": "#/definitions/protocols", "default": "https" }, + "port": { "$ref": "#/definitions/port", "default": 9200 }, + "path": { "$ref": "#/definitions/path" }, + "username": { "$ref": "#/definitions/username" }, + "passphrase": { "$ref": "base_schema.json#/definitions/secret" }, + "apiVersion": { "$ref": "#/definitions/apiVersion", "default": "6.0" }, + "index": { "$ref": "#/definitions/index" } + }, + "allOf": [ + { + "if": { "properties": { "apiVersion": { "pattern": "^[0-6][.]|^[0-6]$" } } }, + "then": { + "properties": { + "dataType": { + "$ref": "#/definitions/dataType", + "default": "f5.telemetry", + "minLength": 1 + } + } + }, + "else": { + "if": { "properties": { "apiVersion": { "pattern": "^7[.]|^7$" } } }, + "then": { + "properties": { + "dataType": { + "$ref": "#/definitions/dataType", + "default": "_doc", + "minLength": 1 + } + } + }, + "else": { + "allOf": [ + { "not": { "required": [ "dataType" ] } } + ] + } + } + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Sumo_Logic" } } }, + "then": { + "required": [ + "host", + "passphrase" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "protocol": { "$ref": "#/definitions/protocols", "default": "https" }, + "port": { "$ref": "#/definitions/port", "default": 443 }, + "path": { "$ref": "#/definitions/path", "default": "/receiver/v1/http/" }, + "passphrase": { "$ref": 
"base_schema.json#/definitions/secret" } + } + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Statsd" } } }, + "then": { + "required": [ + "host" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "protocol": { + "title": "Protocol", + "type": "string", + "enum": [ "tcp", "udp" ], + "default": "udp" + }, + "port": { "$ref": "#/definitions/port", "default": 8125 }, + "addTags": { "$ref": "#/definitions/autoTaggingStatsd" }, + "convertBooleansToMetrics": { "$ref": "#/definitions/convertBooleansToMetrics", "default": "false" } + } + }, + "else": {} + }, + { + "if": { + "properties": { "type": { "enum": ["Google_Cloud_Monitoring", "Google_StackDriver", "Google_Cloud_Logging"] } } + }, + "then": { + "required": [ + "serviceEmail" + ], + "properties": { + "privateKeyId": { "$ref": "#/definitions/privateKeyId" }, + "serviceEmail": { "$ref": "#/definitions/serviceEmail" }, + "privateKey": { "$ref": "base_schema.json#/definitions/secret" }, + "useServiceAccountToken": { "$ref": "#/definitions/useServiceAccountToken", "default": false }, + "reportInstanceMetadata": { "$ref": "#/definitions/reportInstanceMetadata", "default": false } + }, + "allOf": [ + { + "dependencies": { + "privateKeyId": { + "anyOf": [ + { "not": {"required": [ "useServiceAccountToken" ] } }, + { "properties": { "useServiceAccountToken": { "const": false } } } + ] + } + } + }, + { + "dependencies": { + "privateKey": { + "anyOf": [ + { "not": {"required": [ "useServiceAccountToken" ] } }, + { "properties": { "useServiceAccountToken": { "const": false } } } + ] + } + } + }, + { + "if": { + "anyOf": [ + { "not": { "required" : [ "useServiceAccountToken"] } }, + { "properties": { "useServiceAccountToken": { "const": false } } } + ] + }, + "then": { "required": ["privateKeyId", "privateKey"] }, + "else": { "not": { "required": ["privateKeyId", "privateKey"] } } + }, + { + "if": { "properties": { "type": { "enum": ["Google_Cloud_Monitoring", "Google_StackDriver"] } } 
}, + "then": { + "properties": { + "projectId": { "$ref": "#/definitions/projectId"} + }, + "required": ["projectId"] + } + }, + { + "if": { "properties": { "type": { "const": "Google_Cloud_Logging" } } }, + "then": { + "properties": { + "logScope": { "$ref": "#/definitions/logScope", "default": "projects" }, + "logScopeId": { "$ref": "#/definitions/logScopeId"}, + "logId": { "$ref": "#/definitions/logId"} + }, + "required": ["logScope", "logScopeId", "logId"] + } + } + ] + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "F5_Cloud" } } }, + "then": { + "required": [ + "f5csTenantId", + "f5csSensorId", + "payloadSchemaNid", + "serviceAccount", + "targetAudience" + ], + "properties": { + "port": { "$ref": "#/definitions/port", "default": 443 }, + "eventSchemaVersion": { "$ref": "#/definitions/eventSchemaVersion" }, + "f5csTenantId": { "$ref": "#/definitions/f5csTenantId" }, + "f5csSensorId": { "$ref": "#/definitions/f5csSensorId" }, + "payloadSchemaNid": { "$ref": "#/definitions/payloadSchemaNid" }, + "serviceAccount": { "$ref": "#/definitions/serviceAccount" }, + "targetAudience": { "$ref": "#/definitions/targetAudience" }, + "useSSL": { "$ref": "#/definitions/useSSL", "default": true } + }, + "nodeSupportVersion": "8.11.1" + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "DataDog" } } }, + "then": { + "required": [ + "apiKey" + ], + "properties": { + "apiKey": { "$ref": "#/definitions/apiKey" }, + "compressionType": { "$ref": "#/definitions/compressionType", "default": "none" }, + "region": { "$ref": "#/definitions/region", "enum": ["US1", "US3", "EU1", "US1-FED"], "default": "US1" }, + "service": { "$ref": "#/definitions/service", "default": "f5-telemetry" }, + "metricPrefix": { "$ref": "#/definitions/metricPrefix" }, + "convertBooleansToMetrics": { "$ref": "#/definitions/convertBooleansToMetrics", "default": "false" }, + "customTags": { "$ref": "#/definitions/customTags" } + } + }, + "else": {} + }, + { + "if": { 
"properties": { "type": { "const": "OpenTelemetry_Exporter" } } }, + "then": { + "required": [ + "host", + "port" + ], + "properties": { + "host": { "$ref": "#/definitions/host" }, + "port": { "$ref": "#/definitions/port" }, + "headers": { "$ref": "#/definitions/headers" }, + "metricsPath": { "$ref": "#/definitions/path" }, + "convertBooleansToMetrics": { "$ref": "#/definitions/convertBooleansToMetrics", "default": "false" } + }, + "nodeSupportVersion": "8.11.1" + }, + "else": {} + } + ] + }, + "else": {} + } + ] +} diff --git a/src/schema/1.30.0/controls_schema.json b/src/schema/1.30.0/controls_schema.json new file mode 100644 index 00000000..2bbfcff8 --- /dev/null +++ b/src/schema/1.30.0/controls_schema.json @@ -0,0 +1,52 @@ +{ + "$id": "controls_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Controls schema", + "description": "", + "type": "object", + "allOf": [ + { + "if": { "properties": { "class": { "const": "Controls" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Controls class", + "type": "string", + "enum": [ "Controls" ] + }, + "logLevel": { + "title": "Logging Level", + "description": "", + "type": "string", + "default": "info", + "enum": [ + "debug", + "info", + "error" + ] + }, + "debug": { + "title": "Enable debug mode", + "description": "", + "type": "boolean", + "default": false + }, + "memoryThresholdPercent": { + "title": "Memory Usage Threshold (Percentage of Available Process Memory)", + "description": "Once memory usage reaches this value, processing may temporarily cease until levels return below threshold. 
Defaults to 90%", + "type": "integer", + "minimum": 1, + "maximum": 100, + "default": 90 + } + }, + "additionalProperties": false + }, + "else": {} + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/endpoints_schema.json b/src/schema/1.30.0/endpoints_schema.json new file mode 100644 index 00000000..1e12b4b3 --- /dev/null +++ b/src/schema/1.30.0/endpoints_schema.json @@ -0,0 +1,190 @@ +{ + "$id": "endpoints_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Endpoints schema", + "description": "", + "type": "object", + "definitions": { + "endpoint": { + "title": "Telemetry Endpoint", + "description": "", + "type": "object", + "properties": { + "enable": { + "title": "Enable endpoint", + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "name": { + "title": "Endpoint name", + "type": "string", + "minLength": 1 + }, + "numericalEnums": { + "title": "SNMP Options: print enums numerically", + "type": "boolean" + }, + "path": { + "title": "Path to query data from", + "type": "string", + "minLength": 1 + }, + "protocol": { + "title": "Endpoint protocol used to fetch data", + "type": "string", + "enum": ["http", "snmp"], + "default": "http" + } + }, + "allOf": [ + { + "if": { "properties": { "protocol": { "const": "snmp" } } }, + "then": { + "properties": { + "numericalEnums": { + "default": false + }, + "path": { + "pattern": "^[a-zA-Z0-9.]+$" + } + } + }, + "else": { + "not": { + "required": ["numericalEnums"] + } + } + } + ], + "additionalProperties": false + }, + "endpoints": { + "title": "Telemetry Endpoints", + "description": "", + "type": "object", + "properties": { + "enable": { + "title": "Enable endpoints", + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "basePath": { + "title": "Base Path", + "description": "Optional base path value to prepend to each individual endpoint paths", + "type": 
"string", + "default": "" + }, + "items": { + "title": "Items", + "description": "Object with each property an endpoint with their own properties", + "type": "object", + "additionalProperties": { + "allOf": [ + { + "$ref": "#/definitions/endpoint" + }, + { + "required": [ "path"] + } + ] + }, + "minProperties": 1 + } + } + }, + "endpointsObjectRef": { + "allOf": [ + { + "$ref": "#/definitions/endpoints" + }, + { + "properties": { + "enable": {}, + "basePath": {}, + "items": {} + }, + "required": [ "items" ], + "additionalProperties": false + } + ] + }, + "endpointObjectRef": { + "allOf": [ + { + "$ref": "#/definitions/endpoint" + }, + { + "properties": { + "enable": {}, + "name": {}, + "numericalEnums": {}, + "path": {}, + "protocol": {} + }, + "required": [ "name", "path" ], + "additionalProperties": false + } + ] + }, + "endpointsPointerRef": { + "title": "Telemetry_Endpoints Name", + "description": "Name of the Telemetry_Endpoints object", + "type": "string", + "declarationClass": "Telemetry_Endpoints", + "minLength": 1 + }, + "endpointsItemPointerRef": { + "title": "Telemetry_Endpoints Name and Item Key", + "description": "Name of the Telemetry_Endpoints object and the endpoint item key, e.g endpointsA/item1", + "type": "string", + "declarationClassProp": { + "path" :"Telemetry_Endpoints/items", + "partsNum": 2 + }, + "minLength": 1 + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_Endpoints" } } }, + "then": { + "required": [ + "class", + "items" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Endpoints class", + "type": "string", + "enum": [ "Telemetry_Endpoints" ] + } + }, + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "class": {}, + "enable": {}, + "basePath": {}, + "items": {} + }, + "additionalProperties": false + }, + { + "$ref": 
"#/definitions/endpoints" + } + ] + }, + "else": {} + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/ihealth_poller_schema.json b/src/schema/1.30.0/ihealth_poller_schema.json new file mode 100644 index 00000000..d5bcb9cf --- /dev/null +++ b/src/schema/1.30.0/ihealth_poller_schema.json @@ -0,0 +1,238 @@ +{ + "$id": "ihealth_poller_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming iHealth Poller schema", + "description": "", + "type": "object", + "definitions": { + "time24hr": { + "title": "Time in HH:MM, 24hr", + "description": "", + "type": "string", + "pattern": "^([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]?$" + }, + "iHealthPoller": { + "$comment": "system_schema.json should be updated when new property added", + "title": "iHealth Poller", + "description": "", + "type": "object", + "required": [ + "interval", + "username", + "passphrase" + ], + "properties": { + "enable": { + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "trace": { + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/trace" + } + ] + }, + "proxy": { + "title": "Proxy configuration", + "properties": { + "port": { + "default": 80 + }, + "protocol": { + "default": "http" + } + }, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/proxy" + } + ] + }, + "username": { + "title": "iHealth Username", + "$ref": "base_schema.json#/definitions/username" + }, + "passphrase": { + "title": "iHealth Passphrase", + "$ref": "base_schema.json#/definitions/secret" + }, + "downloadFolder": { + "title": "Directory to download Qkview to", + "description": "", + "type": "string", + "minLength": 1, + "pathExists": true + }, + "interval": { + "title": "Operating interval", + "description": "" , + "type": "object", + "properties": { + "timeWindow": { + "title": "Two or more hours window in 24hr format that iHealth data can be sent", + "description": "", + "type": 
"object", + "properties": { + "start": { + "title": "Time when the window starts", + "$ref": "#/definitions/time24hr" + }, + "end": { + "title": "Time when the window ends", + "$ref": "#/definitions/time24hr" + } + }, + "timeWindowMinSize": 120, + "required": [ "start", "end" ], + "additionalProperties": false + }, + "frequency": { + "title": "Interval frequency", + "description": "", + "type": "string", + "default": "daily", + "enum": [ + "daily", + "weekly", + "monthly" + ] + } + + }, + "required": [ + "timeWindow" + ], + "allOf": [ + { + "if": { "properties": { "frequency": { "const": "daily" } } }, + "then": { + "properties": { + "timeWindow": {}, + "frequency": {} + }, + "additionalProperties": false + } + }, + { + "if": { "properties": { "frequency": { "const": "weekly" } } }, + "then": { + "properties": { + "timeWindow": {}, + "frequency": {}, + "day": { + "title": "", + "description": "", + "oneOf": [ + { + "type": "string", + "pattern": "^([mM]onday|[tT]uesday|[wW]ednesday|[tT]hursday|[fF]riday|[sS]aturday|[sS]unday)$" + }, + { + "$comment": "0 and 7 eq. 
Sunday", + "type": "integer", + "minimum": 0, + "maximum": 7 + } + ] + } + }, + "required": [ "day" ], + "additionalProperties": false + } + }, + { + "if": { "properties": { "frequency": { "const": "monthly" } } }, + "then": { + "properties": { + "timeWindow": {}, + "frequency": {}, + "day": { + "title": "", + "description": "", + "type": "integer", + "minimum": 1, + "maximum": 31 + } + }, + "required": [ "day" ], + "additionalProperties": false + } + } + ] + } + } + }, + "iHealthPollerPointerRef": { + "type": "string", + "minLength": 1, + "declarationClass": "Telemetry_iHealth_Poller" + }, + "iHealthPollerObjectRef": { + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "enable": {}, + "trace": {}, + "interval": {}, + "proxy": {}, + "username": {}, + "passphrase": {}, + "downloadFolder": {} + }, + "additionalProperties": false + }, + { + "$ref": "ihealth_poller_schema.json#/definitions/iHealthPoller" + } + ] + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_iHealth_Poller" } } }, + "then": { + "required": [ + "class", + "username", + "passphrase" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming iHealth Poller class", + "type": "string", + "enum": [ "Telemetry_iHealth_Poller" ] + } + }, + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "class": {}, + "enable": {}, + "trace": {}, + "interval": {}, + "proxy": {}, + "username": {}, + "passphrase": {}, + "downloadFolder": {} + }, + "additionalProperties": false + }, + { + "$ref": "#/definitions/iHealthPoller" + } + ] + }, + "else": {}, + "$comment": "Telemetry_iHealth_Poller should be either built-in within Telemetry_System or referenced by Telemetry_System(s), otherwise it 
will be treated as disabled" + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/listener_schema.json b/src/schema/1.30.0/listener_schema.json new file mode 100644 index 00000000..d3b9434e --- /dev/null +++ b/src/schema/1.30.0/listener_schema.json @@ -0,0 +1,85 @@ +{ + "$id": "listener_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming event listener schema", + "description": "", + "type": "object", + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_Listener" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Event Listener class", + "type": "string", + "enum": [ "Telemetry_Listener" ] + }, + "enable": { + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "trace": { + "default": false, + "oneOf": [ + { + "$ref": "base_schema.json#/definitions/trace" + }, + { + "$ref": "base_schema.json#/definitions/traceV2" + } + ] + }, + "port": { + "minimum": 1024, + "maximum": 65535, + "default": 6514, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/port" + } + ] + }, + "tag": { + "$comment": "Deprecated! 
Use actions with a setTag action.", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/tag" + } + ] + }, + "match": { + "default": "", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/match" + } + ] + }, + "actions": { + "title": "Actions", + "description": "Actions to be performed on the listener.", + "default": [ + { + "setTag": { + "tenant": "`T`", + "application": "`A`" + } + } + ], + "allOf": [{ "$ref": "actions_schema.json#/definitions/inputDataStreamActionsChain" }] + } + }, + "additionalProperties": false + }, + "else": {} + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/namespace_schema.json b/src/schema/1.30.0/namespace_schema.json new file mode 100644 index 00000000..f6cb09fc --- /dev/null +++ b/src/schema/1.30.0/namespace_schema.json @@ -0,0 +1,92 @@ +{ + "$id": "namespace_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Namespace schema", + "description": "", + "type": "object", + "definitions": { + "namespace": { + "required": [ + "class" + ], + "type": "object", + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Namespace class", + "type": "string", + "enum": [ "Telemetry_Namespace" ] + } + }, + "additionalProperties": { + "$comment": "All objects supported under a Telemetry Namespace", + "properties": { + "class": { + "title": "Class", + "type": "string", + "enum": [ + "Telemetry_System", + "Telemetry_System_Poller", + "Telemetry_Listener", + "Telemetry_Consumer", + "Telemetry_Pull_Consumer", + "Telemetry_iHealth_Poller", + "Telemetry_Endpoints", + "Shared" + ] + } + }, + "allOf": [ + { + "$ref": "system_schema.json#" + }, + { + "$ref": "system_poller_schema.json#" + }, + { + "$ref": "listener_schema.json#" + }, + { + "$ref": "consumer_schema.json#" + }, + { + "$ref": "pull_consumer_schema.json#" + }, + { + "$ref": "ihealth_poller_schema.json#" + }, + { + "$ref": "endpoints_schema.json#" + }, + { + "$ref": 
"shared_schema.json#" + } + ] + } + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_Namespace" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Namespace class", + "type": "string", + "enum": [ "Telemetry_Namespace" ] + } + }, + "allOf": [ + { + "$ref": "#/definitions/namespace" + } + ] + }, + "else": {} + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/pull_consumer_schema.json b/src/schema/1.30.0/pull_consumer_schema.json new file mode 100644 index 00000000..0747cbfd --- /dev/null +++ b/src/schema/1.30.0/pull_consumer_schema.json @@ -0,0 +1,101 @@ +{ + "$id": "pull_consumer_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming Pull Consumer schema", + "description": "", + "type": "object", + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_Pull_Consumer" } } }, + "then": { + "required": [ + "class", + "type", + "systemPoller" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming Pull Consumer class", + "type": "string", + "enum": [ "Telemetry_Pull_Consumer" ] + }, + "enable": { + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "trace": { + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/trace" + } + ] + }, + "type": { + "title": "Type", + "description": "" , + "type": "string", + "enum": [ + "default", + "Prometheus" + ] + }, + "systemPoller": { + "title": "Pointer to System Poller(s)", + "anyOf": [ + { + "$ref": "system_poller_schema.json#/definitions/systemPollerPointerRef" + }, + { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "system_poller_schema.json#/definitions/systemPollerPointerRef" + } + ] + }, + "minItems": 1 + } + ] + } + }, + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in 
this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "class": {}, + "enable": {}, + "trace": {}, + "type": {}, + "systemPoller": {} + }, + "additionalProperties": false + }, + { + "if": { "properties": { "type": { "const": "default" } } }, + "then": { + "required": [], + "properties": {} + }, + "else": {} + }, + { + "if": { "properties": { "type": { "const": "Prometheus" } } }, + "then": { + "required": [], + "properties": {} + }, + "else": {} + } + ] + }, + "else": {} + } + ] +} diff --git a/src/schema/1.30.0/shared_schema.json b/src/schema/1.30.0/shared_schema.json new file mode 100644 index 00000000..aa96cb2e --- /dev/null +++ b/src/schema/1.30.0/shared_schema.json @@ -0,0 +1,50 @@ +{ + "$id": "shared_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry streaming Shared schema", + "description": "", + "type": "object", + "allOf": [ + { + "if": { "properties": { "class": { "const": "Shared" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry streaming Shared class", + "type": "string", + "enum": [ "Shared" ] + } + }, + "additionalProperties": { + "properties": { + "class": { + "title": "Class", + "type": "string", + "enum": [ + "Constants", + "Secret" + ] + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Constants" } } }, + "then": { "$ref": "base_schema.json#/definitions/constants" }, + "else": {} + }, + { + "if": { "properties": { "class": { "const": "Secret" } } }, + "then": { "$ref": "base_schema.json#/definitions/secret" }, + "else": {} + } + ] + } + }, + "else": {} + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/system_poller_schema.json b/src/schema/1.30.0/system_poller_schema.json new file mode 100644 index 00000000..dcb3a454 --- /dev/null +++ b/src/schema/1.30.0/system_poller_schema.json @@ -0,0 +1,242 @@ +{ + "$id": "system_poller_schema.json", + 
"$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming system poller schema", + "description": "", + "type": "object", + "definitions": { + "systemPoller": { + "$comment": "system_schema.json should be updated when new property added", + "title": "System Poller", + "description": "", + "type": "object", + "properties": { + "enable": { + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "interval": { + "title": "Collection interval (in seconds)", + "description": "If endpointList is specified, minimum=1. Without endpointList, minimum=60 and maximum=60000. Allows setting interval=0 to not poll on an interval.", + "type": "integer", + "default": 300 + }, + "trace": { + "$ref": "base_schema.json#/definitions/trace" + }, + "tag": { + "$comment": "Deprecated! Use actions with a setTag action.", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/tag" + } + ] + }, + "actions": { + "title": "Actions", + "description": "Actions to be performed on the systemPoller.", + "default": [ + { + "setTag": { + "tenant": "`T`", + "application": "`A`" + } + } + ], + "allOf": [{ "$ref": "actions_schema.json#/definitions/inputDataStreamActionsChain" }] + }, + "endpointList": { + "title": "Endpoint List", + "description": "List of endpoints to use in data collection", + "oneOf": [ + { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "endpoints_schema.json#/definitions/endpointsPointerRef" + }, + { + "$ref": "endpoints_schema.json#/definitions/endpointsItemPointerRef" + }, + { + "if": { "required": [ "items" ]}, + "then": { + "$ref": "endpoints_schema.json#/definitions/endpointsObjectRef" + }, + "else": { + "$ref": "endpoints_schema.json#/definitions/endpointObjectRef" + } + } + + ] + }, + "minItems": 1 + }, + { + "$ref": "endpoints_schema.json#/definitions/endpointsPointerRef" + }, + { + "$ref": "endpoints_schema.json#/definitions/endpointsObjectRef" + } + ] + } + }, + "oneOf": [ + { + "allOf": 
[ + { + "if": { "required": [ "endpointList" ] }, + "then": { + "properties": { + "interval": { + "minimum": 1 + } + } + }, + "else": { + "properties":{ + "interval": { + "minimum": 60, + "maximum": 6000 + } + } + } + } + ] + }, + { + "allOf": [ + { + "properties": { + "interval": { + "enum": [0] + } + } + } + ] + } + ] + }, + "systemPollerPointerRef": { + "type": "string", + "minLength": 1, + "declarationClass": "Telemetry_System_Poller" + }, + "systemPollerObjectRef": { + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "enable": {}, + "trace": {}, + "interval": {}, + "tag": {}, + "actions": {}, + "endpointList": {} + }, + "additionalProperties": false + }, + { + "$ref": "#/definitions/systemPoller" + } + ] + } + }, + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_System_Poller" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming System Poller class", + "type": "string", + "enum": [ "Telemetry_System_Poller" ] + }, + "host": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "default": "localhost", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/host" + } + ] + }, + "port": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "default": 8100, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/port" + } + ] + }, + "protocol": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "default": "http", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/protocol" + } + ] + }, + "allowSelfSignedCert": { + "$comment": "Deprecated! 
Use Telemetry_System to define target device", + "title": "Allow Self-Signed Certificate", + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/allowSelfSignedCert" + } + ] + }, + "enableHostConnectivityCheck": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "$ref": "base_schema.json#/definitions/enableHostConnectivityCheck" + }, + "username": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "$ref": "base_schema.json#/definitions/username" + }, + "passphrase": { + "$comment": "Deprecated! Use Telemetry_System to define target device", + "$ref": "base_schema.json#/definitions/secret" + } + }, + "allOf": [ + { + "$comment": "This allows enforcement of no additional properties in this nested schema - could reuse above properties but prefer a separate block", + "properties": { + "class": {}, + "enable": {}, + "trace": {}, + "interval": {}, + "tag": {}, + "host": {}, + "port": {}, + "protocol": {}, + "allowSelfSignedCert": {}, + "enableHostConnectivityCheck": {}, + "username": {}, + "passphrase": {}, + "actions": {}, + "endpointList": {} + }, + "additionalProperties": false + }, + { + "$ref": "#/definitions/systemPoller" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/src/schema/1.30.0/system_schema.json b/src/schema/1.30.0/system_schema.json new file mode 100644 index 00000000..cba58faa --- /dev/null +++ b/src/schema/1.30.0/system_schema.json @@ -0,0 +1,121 @@ +{ + "$id": "system_schema.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Telemetry Streaming System schema", + "description": "", + "type": "object", + "allOf": [ + { + "if": { "properties": { "class": { "const": "Telemetry_System" } } }, + "then": { + "required": [ + "class" + ], + "properties": { + "class": { + "title": "Class", + "description": "Telemetry Streaming System class", + "type": "string", + "enum": [ "Telemetry_System" ] + }, + "enable": { + "title": "Enable all pollers 
attached to device", + "default": true, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/enable" + } + ] + }, + "trace": { + "$ref": "base_schema.json#/definitions/trace" + }, + "host": { + "title": "System connection address", + "default": "localhost", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/host" + } + ] + }, + "port": { + "title": "System connection port", + "default": 8100, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/port" + } + ] + }, + "protocol": { + "title": "System connection protocol", + "default": "http", + "allOf": [ + { + "$ref": "base_schema.json#/definitions/protocol" + } + ] + }, + "allowSelfSignedCert": { + "title": "Allow Self-Signed Certificate", + "default": false, + "allOf": [ + { + "$ref": "base_schema.json#/definitions/allowSelfSignedCert" + } + ] + }, + "enableHostConnectivityCheck": { + "$ref": "base_schema.json#/definitions/enableHostConnectivityCheck" + }, + "username": { + "title": "System Username", + "$ref": "base_schema.json#/definitions/username" + }, + "passphrase": { + "title": "System Passphrase", + "$ref": "base_schema.json#/definitions/secret" + }, + "systemPoller": { + "title": "System Poller declaration", + "oneOf": [ + { + "$ref": "system_poller_schema.json#/definitions/systemPollerPointerRef" + }, + { + "$ref": "system_poller_schema.json#/definitions/systemPollerObjectRef" + }, + { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "system_poller_schema.json#/definitions/systemPollerObjectRef" + }, + { + "$ref": "system_poller_schema.json#/definitions/systemPollerPointerRef" + } + ] + }, + "minItems": 1 + } + ] + }, + "iHealthPoller": { + "title": "iHealth Poller declaration", + "oneOf": [ + { + "$ref": "ihealth_poller_schema.json#/definitions/iHealthPollerPointerRef" + }, + { + "$ref": "ihealth_poller_schema.json#/definitions/iHealthPollerObjectRef" + } + ] + } + }, + "additionalProperties": false + } + } + ] +} \ No newline at end of file diff --git 
a/src/schema/latest/base_schema.json b/src/schema/latest/base_schema.json index 0a7e3366..2b1f9859 100644 --- a/src/schema/latest/base_schema.json +++ b/src/schema/latest/base_schema.json @@ -242,8 +242,8 @@ "description": "Version of ADC Declaration schema this declaration uses", "type": "string", "$comment": "IMPORTANT: In enum array, please put current schema version first, oldest-supported version last. Keep enum array sorted most-recent-first.", - "enum": [ "1.29.0", "1.28.0", "1.27.1", "1.27.0", "1.26.0", "1.25.0", "1.24.0", "1.23.0", "1.22.0", "1.21.0", "1.20.1", "1.20.0", "1.19.0", "1.18.0", "1.17.0", "1.16.0", "1.15.0", "1.14.0", "1.13.0", "1.12.0", "1.11.0", "1.10.0", "1.9.0", "1.8.0", "1.7.0", "1.6.0", "1.5.0", "1.4.0", "1.3.0", "1.2.0", "1.1.0", "1.0.0", "0.9.0" ], - "default": "1.29.0" + "enum": [ "1.30.0", "1.29.0", "1.28.0", "1.27.1", "1.27.0", "1.26.0", "1.25.0", "1.24.0", "1.23.0", "1.22.0", "1.21.0", "1.20.1", "1.20.0", "1.19.0", "1.18.0", "1.17.0", "1.16.0", "1.15.0", "1.14.0", "1.13.0", "1.12.0", "1.11.0", "1.10.0", "1.9.0", "1.8.0", "1.7.0", "1.6.0", "1.5.0", "1.4.0", "1.3.0", "1.2.0", "1.1.0", "1.0.0", "0.9.0" ], + "default": "1.30.0" }, "$schema": { "title": "Schema", diff --git a/src/schema/latest/endpoints_schema.json b/src/schema/latest/endpoints_schema.json index c389e75a..1e12b4b3 100644 --- a/src/schema/latest/endpoints_schema.json +++ b/src/schema/latest/endpoints_schema.json @@ -24,6 +24,10 @@ "type": "string", "minLength": 1 }, + "numericalEnums": { + "title": "SNMP Options: print enums numerically", + "type": "boolean" + }, "path": { "title": "Path to query data from", "type": "string", @@ -41,10 +45,18 @@ "if": { "properties": { "protocol": { "const": "snmp" } } }, "then": { "properties": { + "numericalEnums": { + "default": false + }, "path": { "pattern": "^[a-zA-Z0-9.]+$" } } + }, + "else": { + "not": { + "required": ["numericalEnums"] + } } } ], @@ -113,6 +125,7 @@ "properties": { "enable": {}, "name": {}, + "numericalEnums": 
{}, "path": {}, "protocol": {} }, diff --git a/test/README.md b/test/README.md index 8155e18e..2b62a797 100644 --- a/test/README.md +++ b/test/README.md @@ -143,12 +143,16 @@ If you already have an existing set of devices, you can run the functional tests Env variables + ARTIFACTORY_DOCKER_HUB - set to the Docker Hub mirror of your choice. If ommitted, will pull Docker images from Docker Hub + CONSUMER_EXCLUDE_REGEX - specify RegEx to exclude Consumers by name + CONSUMER_INCLUDE_REGEX - specify RegEx to include Consumers by name + DUT_EXCLUDE_REGEX - specify RegEx to exclude DUT by hostname + DUT_INCLUDE_REGEX - specify RegEx to include DUT by hostname + SKIP_DUT_SETUP - set value to 1 or true to skip BIG-IP setup step + SKIP_DUT_TEARDOWN - set value to 1 or true to skip BIG-IP teardown step SKIP_DUT_TESTS - set value to 1 or true to skip package tests against BIG-IP. DUT device setup/teardown will still run. SKIP_CONSUMER_TESTS - set value to 1 or true to skip package tests against Consumers - CONSUMER_TYPE_REGEX - specify RegEx to filter Consumers by name - SKIP_PULL_CONSUMER_TESTS - set value to 1 or true to skip package tests against Pull Consumers TEST_HARNESS_FILE - set to the filepath of the test harness file; example harness file above - ARTIFACTORY_DOCKER_HUB - set to the Docker Hub mirror of your choice. If ommitted, will pull Docker images from Docker Hub 3. Trigger the test run with `npm run test-functional`. @@ -159,8 +163,7 @@ If you already have an existing set of devices, you can run the functional tests #!/usr/bin/env bash export SKIP_DUT_TESTS="true" export SKIP_CONSUMER_TESTS="false" - export CONSUMER_TYPE_REGEX="splunk" - export SKIP_PULL_CONSUMER_TESTS="true" + export CONSUMER_INCLUDE_REGEX="splunk" export TEST_HARNESS_FILE="/path/to/harness_facts_flat.json" export ARTIFACTORY_DOCKER_HUB="mymirror.test.com/path" npm run test-functional @@ -174,6 +177,6 @@ These tests are under /test/functional/consumers/googleCloudMonitoringTests.js. 
- GCP_PROJECT_ID - GCP_SERVICE_EMAIL -The above environment variables all come from a service account in GCP. The service account that the tests are using is named telemetryStreamingTesting. In the GCP GUI, you can go to IAM & Admin -> Service Accounts to get to the list of service account. Once the service profile has been selected, you will be able to see what the service account email is and should also see any private key ids for keys that have been created. To get a new private key, the service account should be edited and the "CREATE KEY" option is used. After choosing to create a new key, select the JSON file option. You will be given a file that will have the private key inside of it. +The above environment variables all come from a service account in GCP. The service account that the tests are using is named telemetryStreamingTesting. In the GCP GUI, you can go to IAM & Admin -> Service Accounts to get to the list of service account. Once the service profile has been selected, you will be able to see what the service account email is and should also see any private key ids for keys that have been created. To get a new private key, the service account should be edited and the "CREATE KEY" option is used. After choosing to create a new key, select the JSON file option. You will be given a file that will have the private key inside of it. Important note: Gitlab does not like \n's and there are many of these in the private key given from GCP. Currently the \n's have all been replaced with "REPLACE" as an easy way to identify where \n's should go so that we can manually add them in the code. The \n's should be replaced before adding the private key to the environment variable in Gitlab. 
diff --git a/test/customMochaReporter.js b/test/customMochaReporter.js index ccdd3441..47910a93 100644 --- a/test/customMochaReporter.js +++ b/test/customMochaReporter.js @@ -68,7 +68,9 @@ function CustomMochaReporter(runner, options) { startTime: Date.now() }; } - fileLogger.info(`${currentTest.attempts ? 'Retrying' : 'Starting'} test - ${currentTest.title}`); + + const retryInfo = currentTest.attempts ? ` (${currentTest.attempts} attempt(s) made)` : ''; + fileLogger.info(`${currentTest.attempts ? 'Retrying' : 'Starting'} test - ${currentTest.title}${retryInfo}`); currentTest.attempts += 1; }); @@ -77,14 +79,13 @@ function CustomMochaReporter(runner, options) { const fmtArgs = [test.title]; currentTest.endTime = Date.now(); - if (test.speed !== 'fast') { + if (currentTest.attempts > 1) { + fmt += color('fail', ' (attempts=%d total_time=%dms last_exec_time=%dms)'); + fmtArgs.push(currentTest.attempts, currentTest.endTime - currentTest.startTime, test.duration); + } else { fmt += color(test.speed, ' (%dms)'); fmtArgs.push(test.duration); } - if (currentTest.attempts > 1) { - fmt += color('fail', ' (attempts=%d duration=%dms)'); - fmtArgs.push(currentTest.attempts, currentTest.endTime - currentTest.startTime); - } currentTest = {}; fmtArgs.unshift(fmt); console.info.apply(console, fmtArgs); @@ -103,8 +104,11 @@ function CustomMochaReporter(runner, options) { const fmtArgs = [failedTests, test.title]; if (currentTest.attempts > 1) { - fmt += color('fail', ' (attempts=%d duration=%dms)'); - fmtArgs.push(currentTest.attempts, currentTest.endTime - currentTest.startTime); + fmt += color('fail', ' (attempts=%d total_time=%dms last_exec_time=%dms)'); + fmtArgs.push(currentTest.attempts, currentTest.endTime - currentTest.startTime, test.duration); + } else { + fmt += color('fail', ' (%dms)'); + fmtArgs.push(test.duration); } currentTest = {}; fmtArgs.unshift(fmt); diff --git a/test/functional/cloud/awsTests.js b/test/functional/cloud/awsTests.js index 4662d3a3..304ce7ed 
100644 --- a/test/functional/cloud/awsTests.js +++ b/test/functional/cloud/awsTests.js @@ -8,98 +8,62 @@ 'use strict'; -const fs = require('fs'); -const assert = require('assert'); -const AWS = require('aws-sdk'); -const constants = require('../shared/constants'); -const testUtil = require('../shared/util'); -const awsUtil = require('../../../src/lib/consumers/shared/awsUtil'); - -const ENV_FILE = process.env[constants.ENV_VARS.CLOUD.FILE]; -const ENV_INFO = JSON.parse(fs.readFileSync(ENV_FILE)); -const VM_IP = ENV_INFO.instances[0].mgmt_address; -const VM_PORT = ENV_INFO.instances[0].mgmt_port; -const VM_USER = ENV_INFO.instances[0].admin_username; -const VM_PWD = ENV_INFO.instances[0].admin_password; -const BUCKET = ENV_INFO.bucket; -const REGION = ENV_INFO.region; -const METRIC_NAMESPACE = process.env[constants.ENV_VARS.AWS.METRIC_NAMESPACE]; - -const CLIENT_SECRET = process.env[constants.ENV_VARS.AWS.ACCESS_KEY_SECRET]; -const CLIENT_ID = process.env[constants.ENV_VARS.AWS.ACCESS_KEY_ID]; - -describe('AWS Cloud-based Tests', function () { - this.timeout(600000); - let s3; - let cloudWatch; - let options = {}; - let vmAuthToken; - - const deviceInfo = { - ip: VM_IP, - username: VM_USER, - port: VM_PORT, - password: VM_PWD - }; - - const assertPost = (declaration) => testUtil.postDeclaration(deviceInfo, declaration) - .then((response) => { - testUtil.logger.info('Response from declaration post', { host: VM_IP, response }); - return assert.strictEqual(response.message, 'success', 'POST declaration should return success'); - }); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +const awsUtil = require('../shared/cloudUtils/aws'); +const awsSrcUtil = require('../../../src/lib/consumers/shared/awsUtil'); +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('awsCloudTests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = 
require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/cloud/awsTests + */ - before((done) => { - testUtil.getAuthToken(VM_IP, VM_USER, VM_PWD, VM_PORT) - .then((data) => { - vmAuthToken = data.token; - options = { - protocol: 'https', - port: VM_PORT, - headers: { - 'X-F5-Auth-Token': vmAuthToken - } - }; +logger.info('Initializing harness info'); +const harnessInfo = awsUtil.getCloudHarnessJSON(); +const newHarness = harnessUtils.initializeFromJSON(harnessInfo); - AWS.config.update({ - region: REGION, - accessKeyId: CLIENT_ID, - secretAccessKey: CLIENT_SECRET - }); - s3 = new AWS.S3({ apiVersion: '2006-03-01' }); - cloudWatch = new AWS.CloudWatch({ apiVersion: '2010-08-01' }); +assert.isDefined(newHarness, 'should have harness be initialized at this point'); +assert.isNotEmpty(newHarness.bigip, 'should initialize harness'); +harnessUtils.setDefaultHarness(newHarness); +logger.info('Harness info initialized'); + +describe('AWS Cloud-based Tests', () => { + const harness = harnessUtils.getDefaultHarness(); + const tsRPMInfo = miscUtils.getPackageDetails(); + let AWS_META = null; - done(); - }) - .catch((err) => { done(err); }); + before(() => { + assert.isDefined(harness, 'should have harness be initialized at this point'); + assert.isNotEmpty(harness.bigip, 'should initialize harness'); }); - describe('Setup', () => { - it('should install package', () => { - const packageDetails = testUtil.getPackageDetails(); - const fullPath = `${packageDetails.path}/${packageDetails.name}`; - return testUtil.installPackage(VM_IP, vmAuthToken, fullPath, VM_PORT) - .then(() => { - testUtil.logger.info(`Successfully installed RPM: ${fullPath} on ${VM_IP}`); - }); - }); + describe('DUT Setup', () => { + testUtils.shouldRemovePreExistingTSDeclaration(harness.bigip); + testUtils.shouldRemovePreExistingTSPackage(harness.bigip); + 
testUtils.shouldInstallTSPackage(harness.bigip, () => tsRPMInfo); + testUtils.shouldVerifyTSPackageInstallation(harness.bigip); + }); - it('should verify TS service is running', () => { - const uri = `${constants.BASE_ILX_URI}/info`; + describe('IAM Roles', function () { + this.timeout(600000); - return new Promise((resolve) => { setTimeout(resolve, 5000); }) - .then(() => testUtil.makeRequest(VM_IP, uri, options)) - .then((data) => { - data = data || {}; - testUtil.logger.info(`${uri} response`, { host: VM_IP, data }); - return assert.notStrictEqual(data.version, undefined); - }); - }); - }); + before(() => awsUtil.getCloudMetadataFromProcessEnv() + .then((metadata) => { + AWS_META = metadata; + awsUtil.configureAWSGlobal(AWS_META); + })); - describe('IAM Roles', () => { - describe('AWS_S3', () => { - it('should post systemPoller declaration without credentials', () => { - const declaration = { + describe('AWS S3', () => { + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry', My_System: { class: 'Telemetry_System', @@ -110,52 +74,67 @@ describe('AWS Cloud-based Tests', function () { My_IAM_Consumer: { class: 'Telemetry_Consumer', type: 'AWS_S3', - bucket: BUCKET, - region: REGION + bucket: AWS_META.bucket, + region: AWS_META.region } - }; - return assertPost(declaration); + })); }); - it('should retrieve systemPoller info from bucket', function () { - this.timeout(180000); - - return new Promise((resolve) => { setTimeout(resolve, 90000); }) - .then(() => new Promise((resolve, reject) => { - s3.listObjects({ Bucket: BUCKET, MaxKeys: 5 }, (err, data) => { - if (err) reject(err); - let bucketContents = data.Contents; - assert.notDeepStrictEqual(bucketContents, []); - bucketContents = bucketContents.sort((o1, o2) => o2.LastModified - o1.LastModified); - resolve(bucketContents); + describe('System Poller data', () => { + let s3; + + before(() => { + s3 = awsUtil.getS3Client(); + 
}); + + harness.bigip.forEach((bigip) => it( + `should check AWS S3 for system poller data - ${bigip.name}`, + () => (new Promise((resolve, reject) => { + s3.listObjects({ Bucket: AWS_META.bucket, MaxKeys: 5 }, (err, data) => { + if (err) { + reject(err); + } else { + resolve((data.Contents || []).sort((o1, o2) => o2.LastModified - o1.LastModified)); + } }); })) - .then((bucketContents) => new Promise((resolve, reject) => { - const key = bucketContents[0].Key; - s3.getObject({ Bucket: BUCKET, Key: key, ResponseContentType: 'application/json' }, (err, data) => { - if (err) reject(err); + .then((bucketContents) => new Promise((resolve, reject) => { + assert.isNotEmpty(bucketContents, 'should return non empty response'); + + const key = bucketContents[0].Key; + s3.getObject({ Bucket: AWS_META.bucket, Key: key, ResponseContentType: 'application/json' }, (err, data) => { + if (err) { + reject(err); + } else { + resolve(data); + } + }); + })) + .then((data) => { const body = JSON.parse(data.Body.toString()); // depending on onboarding result, hostname may vary // sufficient to check that there's an entry here // (deployment creates a bucket per instance) - assert.notDeepStrictEqual(body.system, {}); - assert.strictEqual(body.telemetryEventCategory, 'systemInfo'); - resolve(); - }); - })); + assert.isNotEmpty(body.system); + assert.deepStrictEqual(body.telemetryEventCategory, 'systemInfo'); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. 
Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }) + )); }); - it('should remove configuration', () => { - const declaration = { + describe('Teardown TS', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry' - }; - return assertPost(declaration); + })); }); }); - describe('AWS_CloudWatch_Metrics', () => { - it('should post systemPoller declaration without credentials', () => { - const declaration = { + describe('AWS CloudWatch Metrics', () => { + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry', controls: { class: 'Controls', @@ -172,31 +151,40 @@ describe('AWS Cloud-based Tests', function () { class: 'Telemetry_Consumer', type: 'AWS_CloudWatch', dataType: 'metrics', - metricNamespace: METRIC_NAMESPACE, - region: REGION + metricNamespace: AWS_META.metricNamespace, + region: AWS_META.region } - }; - return assertPost(declaration); + })); }); - it('should retrieve systemPoller info from metric namespace', function () { - this.timeout(300000); - - const startTime = new Date().toISOString(); - // metrics take around 2-3 minutes to show up - return new Promise((resolve) => { setTimeout(resolve, 180000); }) - .then(() => { - // get system poller data - const uri = `${constants.BASE_ILX_URI}/systempoller/My_System`; - return testUtil.makeRequest(VM_IP, uri, options); - }) - .then((sysPollerData) => { - const defDimensions = awsUtil.getDefaultDimensions(sysPollerData[0]); - const endTime = new Date().toISOString(); + describe('System Poller data', () => { + let cloudWatch; + let metricDimensions; + + before(() => { + cloudWatch = awsUtil.getCloudWatchClient(); + metricDimensions = {}; + }); + + harness.bigip.forEach((bigip) => it( + `should fetch system poller data via debug endpoint - ${bigip.name}`, + () => bigip.telemetry.getSystemPollerData('My_System') + .then((data) => { + 
metricDimensions[bigip.hostname] = awsSrcUtil.getDefaultDimensions(data[0]); + }) + )); + + harness.bigip.forEach((bigip) => it( + `should check AWS CloudWatch Metrics for system poller data - ${bigip.name}`, + () => { + const timeStart = new Date(); + const timeEnd = new Date(); + timeStart.setMinutes(timeEnd.getMinutes() - 5); + const getOpts = { MaxDatapoints: 10, - StartTime: startTime, - EndTime: endTime, + StartTime: timeStart.toISOString(), + EndTime: timeEnd.toISOString(), // API requires all dimension values if present for the results to appear // you can't match with just one or no dimension value MetricDataQueries: [ @@ -204,9 +192,9 @@ describe('AWS Cloud-based Tests', function () { Id: 'm1', MetricStat: { Metric: { - Namespace: METRIC_NAMESPACE, + Namespace: AWS_META.metricNamespace, MetricName: 'F5_system_cpu', - Dimensions: defDimensions + Dimensions: miscUtils.deepCopy(metricDimensions[bigip.hostname]) }, Period: 300, Stat: 'Average' @@ -215,22 +203,26 @@ describe('AWS Cloud-based Tests', function () { ] }; - return cloudWatch.getMetricData(getOpts).promise(); - }) - .then((data) => { - // if no match, result = null - assert.notStrictEqual(data, null); - const metricDataRes = data.MetricDataResults; - assert.notDeepStrictEqual(metricDataRes, []); - assert.notStrictEqual(metricDataRes[0].Values.length, 0); - }); + return cloudWatch.getMetricData(getOpts).promise() + .then((data) => { + // if no match, result = null + assert.isNotNull(data); + const metricDataRes = data.MetricDataResults; + assert.isNotEmpty(metricDataRes); + assert.isNotEmpty(metricDataRes[0].Values); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. 
Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + } + )); }); - it('should remove configuration', () => { - const declaration = { + describe('Teardown TS', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry' - }; - return assertPost(declaration); + })); }); }); }); diff --git a/test/functional/cloud/azureTests.js b/test/functional/cloud/azureTests.js index 0d5fbd89..05de9000 100644 --- a/test/functional/cloud/azureTests.js +++ b/test/functional/cloud/azureTests.js @@ -8,91 +8,61 @@ 'use strict'; -const assert = require('assert'); -const constants = require('../shared/constants'); -const testUtil = require('../shared/util'); -const azureUtil = require('../shared/azureUtil'); - -const VM_HOSTNAME = process.env[constants.ENV_VARS.AZURE.VM_HOSTNAME]; -const VM_IP = process.env[constants.ENV_VARS.AZURE.VM_IP]; -const VM_PORT = process.env[constants.ENV_VARS.AZURE.VM_PORT] || 8443; -const VM_USER = process.env[constants.ENV_VARS.AZURE.VM_USER] || 'admin'; -const VM_PWD = process.env[constants.ENV_VARS.AZURE.VM_PWD]; -const WORKSPACE_ID = process.env[constants.ENV_VARS.AZURE.WORKSPACE_MI]; -const TENANT_ID = process.env[constants.ENV_VARS.AZURE.TENANT]; -const CLIENT_SECRET = process.env[constants.ENV_VARS.AZURE.LOG_KEY]; -const CLIENT_ID = process.env[constants.ENV_VARS.AZURE.CLIENT_ID]; -const APPINS_API_KEY = process.env[constants.ENV_VARS.AZURE.APPINS_API_KEY]; -const APPINS_APP_ID = process.env[constants.ENV_VARS.AZURE.APPINS_APP_ID]; -const CLOUD_TYPE = process.env[constants.ENV_VARS.AZURE.CLOUD_TYPE]; - -describe('Azure Cloud-based Tests', function () { - this.timeout(600000); - let options = {}; - let vmAuthToken; - const deviceInfo = { - ip: VM_IP, - username: VM_USER, - port: VM_PORT, - password: VM_PWD - }; - - const assertPost = (declaration) => testUtil.postDeclaration(deviceInfo, declaration) - .then((response) => { - testUtil.logger.info('Response from declaration 
post', { hostname: VM_HOSTNAME, response }); - return assert.strictEqual(response.message, 'success', 'POST declaration should return success'); - }); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); - before((done) => { - testUtil.getAuthToken(VM_IP, VM_USER, VM_PWD, VM_PORT) - .then((data) => { - vmAuthToken = data.token; - options = { - protocol: 'https', - port: VM_PORT, - headers: { - 'X-F5-Auth-Token': vmAuthToken - } - }; - done(); - }) - .catch((err) => { done(err); }); - }); +const azureUtil = require('../shared/cloudUtils/azure'); +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('azureCloudTests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); - describe('Setup', () => { - it('should install package', () => { - const packageDetails = testUtil.getPackageDetails(); - const fullPath = `${packageDetails.path}/${packageDetails.name}`; - return testUtil.installPackage(VM_IP, vmAuthToken, fullPath, VM_PORT) - .then(() => { - testUtil.logger.info(`Successfully installed RPM: ${fullPath} on ${VM_IP}`); - }); - }); +chai.use(chaiAsPromised); +const assert = chai.assert; - it('should verify TS service is running', () => { - const uri = `${constants.BASE_ILX_URI}/info`; +/** + * @module test/functional/cloud/azureTests + */ - return new Promise((resolve) => { setTimeout(resolve, 5000); }) - .then(() => testUtil.makeRequest(VM_IP, uri, options)) - .then((data) => { - data = data || {}; - testUtil.logger.info(`${uri} response`, { host: VM_IP, data }); - return assert.notStrictEqual(data.version, undefined); - }); - }); +logger.info('Initializing harness info'); +const harnessInfo = azureUtil.getCloudHarnessJSON(); +const newHarness = harnessUtils.initializeFromJSON(harnessInfo); + +assert.isDefined(newHarness, 'should have harness be initialized at this 
point'); +assert.isNotEmpty(newHarness.bigip, 'should initialize harness'); +harnessUtils.setDefaultHarness(newHarness); +logger.info('Harness info initialized'); + +describe('Azure Cloud-based Tests', () => { + const harness = harnessUtils.getDefaultHarness(); + const tsRPMInfo = miscUtils.getPackageDetails(); + + before(() => { + assert.isDefined(harness, 'should have harness be initialized at this point'); + assert.isNotEmpty(harness.bigip, 'should initialize harness'); + }); + + describe('DUT Setup', () => { + testUtils.shouldRemovePreExistingTSDeclaration(harness.bigip); + testUtils.shouldRemovePreExistingTSPackage(harness.bigip); + testUtils.shouldInstallTSPackage(harness.bigip, () => tsRPMInfo); + testUtils.shouldVerifyTSPackageInstallation(harness.bigip); }); describe('Managed Identities', () => { - describe('Azure_Log_Analytics', () => { - let laReaderToken; - it('should get log reader oauth token', () => azureUtil.getOAuthToken(CLIENT_ID, CLIENT_SECRET, TENANT_ID, CLOUD_TYPE) - .then((data) => { - laReaderToken = data.access_token; - return assert.notStrictEqual(laReaderToken, undefined); + describe('Azure Log Analytics', function () { + let AZURE_LA; + + this.timeout(180000); + + before(() => azureUtil.getCloudMetadataFromProcessEnv(azureUtil.SERVICE_TYPE.LA) + .then((metadata) => { + AZURE_LA = metadata; })); - it('should post systemPoller declaration with useManagedIdentity enabled', () => { - const declaration = { + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry', My_System: { class: 'Telemetry_System', @@ -103,43 +73,69 @@ describe('Azure Cloud-based Tests', function () { My_MI_Consumer: { class: 'Telemetry_Consumer', type: 'Azure_Log_Analytics', - workspaceId: WORKSPACE_ID, + workspaceId: AZURE_LA.workspace, useManagedIdentity: true } - }; - return assertPost(declaration); + })); }); - it('should retrieve systemPoller info from Log Analytics workspace', 
function () { - this.timeout(120000); - const resourceIdMatch = VM_HOSTNAME.substring(0, VM_HOSTNAME.indexOf('.')); - const queryString = [ - 'F5Telemetry_system_CL', - `where hostname_s == "${VM_HOSTNAME}" and _ResourceId contains "${resourceIdMatch}"`, - 'where TimeGenerated > ago(5m)' - ].join(' | '); - - return new Promise((resolve) => { setTimeout(resolve, 60000); }) - .then(() => azureUtil.queryLogs(laReaderToken, WORKSPACE_ID, queryString, CLOUD_TYPE)) - .then((results) => { - testUtil.logger.info('Response from Log Analytics:', { hostname: VM_HOSTNAME, results }); - const hasRows = results.tables[0] && results.tables[0].rows && results.tables[0].rows[0]; - return assert(hasRows, 'Log Analytics query returned no tables/rows'); - }); + describe('System Poller data', () => { + let ACCESS_TOKEN; + + before(() => azureUtil.getOAuthToken( + AZURE_LA.clientID, + AZURE_LA.logKey, + AZURE_LA.tenant, + AZURE_LA.cloudType + ) + .then((_accessToken) => { + ACCESS_TOKEN = _accessToken; + })); + + harness.bigip.forEach((bigip) => it( + `should check Azure LA for system poller data - ${bigip.name}`, + () => { + const resourceIdMatch = bigip.hostname.substring(0, bigip.hostname.indexOf('.')); + const queryString = [ + 'F5Telemetry_system_CL', + `where hostname_s == "${bigip.hostname}" and _ResourceId contains "${resourceIdMatch}"`, + 'where TimeGenerated > ago(5m)' + ].join(' | '); + return azureUtil.queryLogs( + ACCESS_TOKEN, AZURE_LA.workspace, queryString, AZURE_LA.cloudType + ) + .then((results) => { + assert(results.tables[0], 'Log Analytics query returned no results'); + assert(results.tables[0].rows, 'Log Analytics query returned no rows'); + assert(results.tables[0].rows[0], 'Log Analytics query returned no rows'); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. 
Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + } + )); }); - it('should remove configuration', () => { - const declaration = { + describe('Teardown TS', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry' - }; - return assertPost(declaration); + })); }); }); - describe('Azure_Application_Insights', function () { + describe('Azure Application Insights', function () { + let AZURE_AI; + this.timeout(180000); - it('should post systemPoller declaration with useManagedIdentity enabled', () => { - const declaration = { + + before(() => azureUtil.getCloudMetadataFromProcessEnv(azureUtil.SERVICE_TYPE.AI) + .then((metadata) => { + AZURE_AI = metadata; + })); + + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry', My_System: { class: 'Telemetry_System', @@ -152,26 +148,41 @@ describe('Azure Cloud-based Tests', function () { type: 'Azure_Application_Insights', useManagedIdentity: true } - }; - return assertPost(declaration); + })); }); - it('should retrieve system poller info from Application Insights', () => { - testUtil.logger.info('Delay 120000ms to ensure App Insights api data ready'); - return new Promise((resolve) => { setTimeout(resolve, 120000); }) - .then(() => azureUtil.queryAppInsights(APPINS_APP_ID, APPINS_API_KEY, CLOUD_TYPE)) - .then((response) => { - testUtil.logger.info(response); - const val = response.value['customMetrics/F5_system_tmmMemory']; - return assert.ok(val && val.avg > 0); - }); + describe('System Poller data', () => { + it('sleep for 60sec while AI API is not ready', () => promiseUtils.sleep(60000)); + + harness.bigip.forEach((bigip) => { + it(`should check Azure AI for system poller data - ${bigip.name}`, () => azureUtil.queryAppInsights(AZURE_AI.appID, AZURE_AI.apiKey, AZURE_AI.cloudType) + .then((response) => { + // Sample response + // { + // "value": { + // 
"start": "2020-03-23T21:44:59.198Z", + // "end": "2020-03-23T21:47:59.198Z", + // "customMetrics/F5_system_tmmMemory": { + // "avg": 15 + // } + // } + // } + const val = response.value['customMetrics/F5_system_tmmMemory']; + assert.isDefined(val, 'should have expected property in response'); + assert.isDefined(val.avg, 'should have expected "avg" property in response'); + assert.isAbove(val.avg, 0, 'should be greater than 0'); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + })); + }); }); - it('should remove configuration', () => { - const declaration = { + describe('Teardown TS', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy({ class: 'Telemetry' - }; - return assertPost(declaration); + })); }); }); }); diff --git a/test/functional/consumerSystemTests.js b/test/functional/consumerSystemTests.js index cf085fde..1767a4fa 100644 --- a/test/functional/consumerSystemTests.js +++ b/test/functional/consumerSystemTests.js @@ -6,17 +6,18 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; const fs = require('fs'); const constants = require('./shared/constants'); -const util = require('./shared/util'); +const harnessUtils = require('./shared/harness'); +const logger = require('./shared/utils/logger').getChild('cs'); +const miscUtils = require('./shared/utils/misc'); -const consumerHost = util.getHosts('CONSUMER')[0]; // only expect one -const checkDockerCmd = 'if [[ -e $(which docker) ]]; then echo exists; fi'; +/** + * @module test/functional/consumerSystemTests + */ // string -> object (consumer module) let consumersMap = {}; @@ -25,39 +26,49 @@ let consumerRequirements = {}; // string -> boolean const systemRequirements = {}; -/** - * Execute command over SSH on CS - * - * @param {String} cmd - command to execute on CS - * - * @returns Promise resolved when command was executed on CS - */ -function runRemoteCmdOnCS(cmd) { - return util.performRemoteCmd(consumerHost.ip, consumerHost.username, cmd, { password: consumerHost.password }); -} - /** * Load Consumers Tests modules * - * @returns mapping consumer name -> consumer module + * @returns {bject} mapping consumer name -> consumer module */ function loadConsumers() { - // env var to run only specific consumer type(s) (e.g. 
'elast') - const consumerFilter = process.env[constants.ENV_VARS.CONSUMER_HARNESS.TYPE_REGEX]; - const consumerDir = constants.CONSUMERS_DIR; - let consumers = fs.readdirSync(consumerDir); - // filter consumers by module name if needed - if (consumerFilter) { - util.logger.info(`Using filter '${consumerFilter}' to filter modules from '${consumerDir}'`); - consumers = consumers.filter((fName) => fName.match(new RegExp(consumerFilter, 'i')) !== null); + const ignorePattern = miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.CONSUMER.EXCLUDE, { defaultValue: '' }); + const includePattern = miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.CONSUMER.INCLUDE, { defaultValue: '' }); + + let consumerFilter; + if (ignorePattern || includePattern) { + logger.info('Filtering Consumers using following patterns', { + ignore: ignorePattern, + include: includePattern + }); + + let ignoreFilter = () => true; // accept by default + if (ignorePattern) { + const regex = new RegExp(ignorePattern, 'i'); + ignoreFilter = (hostname) => !hostname.match(regex); + } + let includeFilter = () => true; // accept by default + if (includePattern) { + const regex = new RegExp(includePattern, 'i'); + includeFilter = (hostname) => hostname.match(regex); + } + consumerFilter = (consumer) => includeFilter(consumer) && ignoreFilter(consumer); } + const consumerDir = constants.CONSUMERS_DIR; const mapping = {}; - consumers.forEach((consumer) => { - const cpath = `${consumerDir}/${consumer}`; - mapping[consumer] = require(cpath); //eslint-disable-line - util.logger.info(`Consumer Tests from '${cpath}' loaded`); - }); + + fs.readdirSync(consumerDir) + .forEach((consumer) => { + if (consumerFilter && !consumerFilter(consumer)) { + logger.warning('Ignoring Consumer file', { consumer }); + } else { + const cpath = `${consumerDir}/${consumer}`; + mapping[consumer] = require(cpath); //eslint-disable-line + logger.info(`Consumer Tests from '${cpath}' loaded`); + } + }); + return mapping; } @@ -100,6 
+111,9 @@ function hasMeetRequirements(consumer) { return meet; } +/** + * Setup CS + */ function setup() { describe('Load modules with tests for consumers', () => { // should be loaded at the beginning of process @@ -107,62 +121,40 @@ function setup() { consumerRequirements = loadConsumersRequirements(); }); - // purpose: consumer tests - describe(`Consumer System setup - ${consumerHost.ip}`, () => { - describe('Docker installation', () => { - before(function () { - const needDocker = consumerRequirements.DOCKER && consumerRequirements.DOCKER.indexOf(true) !== -1; - if (!needDocker) { - util.logger.info('Docker is not required for testing. Skip CS setup...'); - this.skip(); - } - }); + describe('Consumer System: Setup', () => { + harnessUtils.getDefaultHarness().other.forEach((cs) => describe(cs.name, () => { + describe('Docker installation', () => { + before(function () { + const needDocker = consumerRequirements.DOCKER && consumerRequirements.DOCKER.indexOf(true) !== -1; + if (!needDocker) { + logger.info('Docker is not required for testing. 
Skip CS setup...'); + this.skip(); + } + }); - it('should install docker', () => { - // install docker - assume it does not exist - const installCmd = 'curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh'; - return runRemoteCmdOnCS(checkDockerCmd) - .then((response) => { - if (response.includes('exists')) { - return Promise.resolve(); // exists, continue + it('should install docker', () => cs.docker.installed() + .then((isOk) => { + if (isOk) { + cs.logger.info('Docker installed already!'); + return Promise.resolve(); } - return runRemoteCmdOnCS(installCmd); + return cs.docker.install(); }) .then(() => { + cs.logger.info('Docker installed!'); systemRequirements.DOCKER = true; - }) - .catch((err) => { - util.logger.error(`Unable to install 'docker': ${err}`); - return Promise.reject(err); - }); - }); - - it('should remove all docker "container"', () => runRemoteCmdOnCS('docker ps -a -q') - .then((response) => { - if (response) { - return runRemoteCmdOnCS(`docker rm -f ${response}`); - } - return Promise.resolve(); - })); - - it('should remove all docker "image"', () => runRemoteCmdOnCS('docker images -q') - .then((response) => { - if (response) { - return runRemoteCmdOnCS(`docker rmi -f ${response}`); - } - return Promise.resolve(); - })); + })); - it('should prune all docker "system"', () => runRemoteCmdOnCS('docker system prune -f')); - - it('should prune all docker "volume"', () => runRemoteCmdOnCS('docker volume prune -f')); - }); + it('should remove all docker containers, images and etc.', () => cs.docker.removeAll()); + }); + })); }); } +/** + * Run tests + */ function test() { - const methodsToCall = ['setup', 'test', 'teardown']; - describe('Consumer Tests', () => { // consumers tests should be loaded already Object.keys(consumersMap).forEach((consumer) => { @@ -173,7 +165,7 @@ function test() { before(() => { skipTests = !hasMeetRequirements(consumerModule); if (skipTests) { - util.logger.warn(`CS for Consumer Tests '${consumer}' doesn't 
meet requirements - skip all tests`); + logger.warning(`CS for Consumer Tests '${consumer}' doesn't meet requirements - skip all tests`); } }); beforeEach(function () { @@ -183,11 +175,12 @@ function test() { } }); - methodsToCall.forEach((method) => { + ['setup', 'test', 'teardown'].forEach((method) => { if (consumerModule[method]) { consumerModule[method].apply(consumerModule); } else { - util.logger.console.warn(`WARN: ConsumerTest "${consumer}" has no '${method}' method to call`); + // eslint-disable-next-line no-console + console.warn(`WARN: ConsumerTest "${consumer}" has no '${method}' method to call`); } }); }); @@ -195,38 +188,28 @@ function test() { }); } +/** + * Teardown CS + */ function teardown() { - // purpose: consumer tests - describe(`Consumer host teardown - ${consumerHost.ip}`, () => { - describe('Docker containers and images cleanup', () => { - before(function () { - // skip docker cleanup if docker was not installed - if (!systemRequirements.DOCKER) { - util.logger.info('Docker is not required for testing. Skip CS teardown...'); - this.skip(); - } - }); - - it('should remove all docker "container"', () => runRemoteCmdOnCS('docker ps -a -q') - .then((response) => { - if (response) { - return runRemoteCmdOnCS(`docker rm -f ${response}`); - } - return Promise.resolve(); - })); - - it('should remove all docker "image"', () => runRemoteCmdOnCS('docker images -q') - .then((response) => { - if (response) { - return runRemoteCmdOnCS(`docker rmi -f ${response}`); + describe('Consumer System: Teardown', () => { + harnessUtils.getDefaultHarness().other.forEach((cs) => describe(cs.name, () => { + describe('Docker containers and images cleanup', () => { + before(function () { + // skip docker cleanup if docker was not installed + if (!systemRequirements.DOCKER) { + logger.info('Docker is not required for testing. 
Skip CS teardown...'); + this.skip(); } - return Promise.resolve(); - })); + }); - it('should prune all docker "system"', () => runRemoteCmdOnCS('docker system prune -f')); + it('should remove all docker containers, images and etc.', () => cs.docker.removeAll()); + }); - it('should prune all docker "volume"', () => runRemoteCmdOnCS('docker volume prune -f')); - }); + describe('Other cleanup', () => { + it('teardown all connections', () => cs.teardown()); + }); + })); }); } diff --git a/test/functional/consumersTests/azureApplicationInsightsTests.js b/test/functional/consumersTests/azureApplicationInsightsTests.js index 6324378d..c53a9a6b 100644 --- a/test/functional/consumersTests/azureApplicationInsightsTests.js +++ b/test/functional/consumersTests/azureApplicationInsightsTests.js @@ -6,20 +6,29 @@ * the software product on devcentral.f5.com. */ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const testUtil = require('../shared/util'); -const azureUtil = require('../shared/azureUtil'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +const azureUtil = require('../shared/cloudUtils/azure'); const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('azureAITests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); -const DUTS = testUtil.getHosts('BIGIP'); +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/azureAI + */ + +// read in example config +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); /** * Should look like: * [ @@ -34,112 +43,108 
@@ const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); * } * ] */ -let APPINS_API_DATA; +let AI_METADATA; -function setup() { - const fileName = process.env[constants.ENV_VARS.AZURE.APPINS_API_DATA]; - assert.ok(fileName, `should define env variable ${constants.ENV_VARS.AZURE.APPINS_API_DATA} (path to file with Azure App Insights API info)`); +/** + * Tests for DUTs + */ +function test() { + describe('Consumer Test: Azure App Insights', () => { + const harness = harnessUtils.getDefaultHarness(); - testUtil.logger.info(`Reading and parsing file '${fileName}' for Azure App Insights`); - APPINS_API_DATA = JSON.parse(fs.readFileSync(fileName)); + const getAppInsightAPIInfo = (function () { + const key2application = {}; + let lastID = -1; + return function getter(key) { + let value = key2application[key]; + if (!value) { + lastID += 1; + value = AI_METADATA[lastID]; + if (!value) { + throw new Error(`Not enough items in AI_METADATA: ${AI_METADATA.length} items configured, but requests for #${lastID}`); + } + key2application[key] = value; + } + return value; + }; + }()); - assert.ok(Array.isArray(APPINS_API_DATA) && APPINS_API_DATA.length > 0, 'should be an array and have 1 or more elements in it'); - APPINS_API_DATA.forEach((item, idx) => { - assert.ok(item.instrKey, `APPINS_API_DATA item #${idx} should have instrKey`); - assert.ok(item.apiKey, `APPINS_API_DATA item #${idx} should have apiKey`); - assert.ok(item.appID, `APPINS_API_DATA item #${idx} should have appID`); - }); - testUtil.logger.debug(`APPINS_API_DATA has ${APPINS_API_DATA.length} items`); -} + before(() => azureUtil.getMetadataFromProcessEnv(azureUtil.SERVICE_TYPE.AI) + .then((metadata) => { + assert.isArray(metadata, 'should be an array'); + assert.isNotEmpty(metadata, 'should have 1 or more elements'); -function test() { - const getAppInsightAPIInfo = (function () { - const key2application = {}; - let lastID = -1; - return function getter(key) { - let value = key2application[key]; - 
if (!value) { - lastID += 1; - value = APPINS_API_DATA[lastID]; - if (!value) { - throw new Error(`Not enough items in APPINS_API_DATA: ${APPINS_API_DATA.length} items configured, but requests for #${lastID}`); + const props = [ + 'apiKey', + 'appID', + 'instrKey' + ]; + + metadata.forEach((item, idx) => props.forEach((propName) => { + assert.isDefined(item[propName], `Azure Application Insights metadata item #${idx} should have "${propName}" property`); + })); + logger.debug(`Azure Application Insights metadata has ${metadata.length} items - 6 BIG-IPs can be used simultaneously`); + + AI_METADATA = metadata; + })); + + describe('Configure TS and generate data', () => { + let referenceDeclaration; + + before(() => { + referenceDeclaration = miscUtils.deepCopy(DECLARATION); + referenceDeclaration.My_Consumer = { + class: 'Telemetry_Consumer', + type: 'Azure_Application_Insights', + instrumentationKey: null, + maxBatchIntervalMs: 2000 + }; + }); + + testUtils.shouldConfigureTS(harness.bigip, (bigip) => { + const declaration = miscUtils.deepCopy(referenceDeclaration); + const apiInfo = getAppInsightAPIInfo(bigip.name); + declaration.My_Consumer.instrumentationKey = apiInfo.instrKey; + if (apiInfo.region) { + declaration.My_Consumer.region = apiInfo.region; } - key2application[key] = value; - } - return value; - }; - }()); - - describe('Consumer Test: Azure App Insights - Configure TS and generate data', () => { - const referenceDeclaration = testUtil.deepCopy(DECLARATION); - referenceDeclaration.My_Consumer = { - class: 'Telemetry_Consumer', - type: 'Azure_Application_Insights', - instrumentationKey: null, - maxBatchIntervalMs: 2000 - }; - DUTS.forEach((dut) => it(`should configure TS - ${dut.hostalias}`, () => { - const declaration = testUtil.deepCopy(referenceDeclaration); - const apiInfo = getAppInsightAPIInfo(dut.ip); - declaration.My_Consumer.instrumentationKey = apiInfo.instrKey; - if (apiInfo.region) { - declaration.My_Consumer.region = apiInfo.region; - } - 
return dutUtils.postDeclarationToDUT(dut, declaration); - })); - }); + return declaration; + }); + }); - describe('Consumer Test: Azure App Insights - Test', function () { - this.timeout(180000); - let firstAttemptOverAll = true; - - DUTS.forEach((dut) => { - let triedWithoutAddtlDelay = false; - - it(`should check for system poller data from:${dut.hostalias}`, () => { - const apiInfo = getAppInsightAPIInfo(dut.ip); - return Promise.resolve() - .then(() => { - if (firstAttemptOverAll) { - // first attempt in entire suite - data might not be ready yet - firstAttemptOverAll = false; - testUtil.logger.info('Delay 120000ms to ensure App Insights api data ready (first attempt in entire suite)'); - return testUtil.sleep(120000); - } - if (!triedWithoutAddtlDelay) { - // let's try to fetch data without delay - triedWithoutAddtlDelay = true; - return Promise.resolve(); - } - testUtil.logger.info('Delay 30000ms to ensure App Insights api data ready'); - return testUtil.sleep(30000); - }) - .then(() => azureUtil.queryAppInsights(apiInfo.appID, apiInfo.apiKey)) - .then((response) => { - // Sample response - // { - // "value": { - // "start": "2020-03-23T21:44:59.198Z", - // "end": "2020-03-23T21:47:59.198Z", - // "customMetrics/F5_system_tmmMemory": { - // "avg": 15 - // } - // } - // } - testUtil.logger.info(response); - const val = response.value['customMetrics/F5_system_tmmMemory']; - assert.ok(val && val.avg > 0); - }); + describe('System Poller data', () => { + it('sleep for 60sec while AI API is not ready', () => promiseUtils.sleep(60000)); + + harness.bigip.forEach((bigip) => { + it(`should check Azure AI for system poller data - ${bigip.name}`, () => { + const apiInfo = getAppInsightAPIInfo(bigip.name); + return azureUtil.queryAppInsights(apiInfo.appID, apiInfo.apiKey) + .then((response) => { + // Sample response + // { + // "value": { + // "start": "2020-03-23T21:44:59.198Z", + // "end": "2020-03-23T21:47:59.198Z", + // "customMetrics/F5_system_tmmMemory": { + // 
"avg": 15 + // } + // } + // } + const val = response.value['customMetrics/F5_system_tmmMemory']; + assert.isDefined(val, 'should have expected property in response'); + assert.isDefined(val.avg, 'should have expected "avg" property in response'); + assert.isAbove(val.avg, 0, 'should be greater than 0'); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + }); }); }); }); } -function teardown() { -} - module.exports = { - setup, - test, - teardown + test }; diff --git a/test/functional/consumersTests/azureLogAnalyticsTests.js b/test/functional/consumersTests/azureLogAnalyticsTests.js index 6b1fd471..61c5e787 100644 --- a/test/functional/consumersTests/azureLogAnalyticsTests.js +++ b/test/functional/consumersTests/azureLogAnalyticsTests.js @@ -6,112 +6,142 @@ * the software product on devcentral.f5.com. */ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const util = require('../shared/util'); -const azureUtil = require('../shared/azureUtil'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +const azureUtil = require('../shared/cloudUtils/azure'); const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); -const DUTS = util.getHosts('BIGIP'); +chai.use(chaiAsPromised); +const assert = chai.assert; -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); -const PASSPHRASE = process.env[constants.ENV_VARS.AZURE.PASSPHRASE]; -const WORKSPACE_ID = process.env[constants.ENV_VARS.AZURE.WORKSPACE]; -const TENANT_ID = process.env[constants.ENV_VARS.AZURE.TENANT]; -const 
CLIENT_SECRET = process.env[constants.ENV_VARS.AZURE.LOG_KEY]; -const CLIENT_ID = process.env[constants.ENV_VARS.AZURE.CLIENT_ID]; +/** + * @module test/functional/consumersTests/azureLA + */ + +// read in example config +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; const AZURE_LA_CONSUMER_NAME = 'Azure_LA_Consumer'; -let oauthToken = null; +let ACCESS_TOKEN = null; +let AZURE = null; +/** + * Setup CS and DUTs + */ function setup() { - describe('Consumer Setup: Azure Log Analytics - OAuth token', () => { - it('should get OAuth token', () => azureUtil.getOAuthToken(CLIENT_ID, CLIENT_SECRET, TENANT_ID) - .then((data) => { - oauthToken = data.access_token; - return assert.notStrictEqual(oauthToken, undefined); - }) - .catch((err) => { - util.logger.error(`Unable to get OAuth token: ${err}`); - return Promise.reject(err); + describe('Consumer Setup: Azure Log Analytics', () => { + before(() => { + ACCESS_TOKEN = null; + return azureUtil.getMetadataFromProcessEnv(azureUtil.SERVICE_TYPE.LA) + .then((azureData) => { + AZURE = azureData; + }); + }); + + it('should get OAuth token', () => azureUtil.getOAuthToken(AZURE.clientID, AZURE.logKey, AZURE.tenant) + .then((authToken) => { + assert.isDefined(authToken, 'should acquire auth token'); + ACCESS_TOKEN = authToken; })); }); } +/** + * Tests for DUTs + */ function test() { - const testDataTimestamp = Date.now(); - - describe('Consumer Test: Azure Log Analytics - Configure TS and generate data', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - consumerDeclaration[AZURE_LA_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Azure_Log_Analytics', - workspaceId: WORKSPACE_ID, - passphrase: { - cipherText: PASSPHRASE - } - }; - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(consumerDeclaration)) - )); - - it('should send event to TS 
Event Listener', () => { - const msg = `timestamp="${testDataTimestamp}",test="${testDataTimestamp}",testType="${AZURE_LA_CONSUMER_NAME}"`; - return dutUtils.sendDataToEventListeners((dut) => `hostname="${dut.hostname}",${msg}`); + describe('Consumer Test: Azure Log Analytics', () => { + const harness = harnessUtils.getDefaultHarness(); + const testDataTimestamp = Date.now(); + + before(() => { + assert.isNotNull(ACCESS_TOKEN, 'should acquire Azure LA token'); + assert.isNotNull(AZURE, 'should acquire Azure LA API metadata from process.env'); }); - }); - describe('Consumer Test: Azure Log Analytics - Test', () => { - DUTS.forEach((dut) => { - it(`should check for system poller data from:${dut.hostalias}`, () => { - // system poller is on an interval, so space out the retries - // NOTE: need to determine mechanism to shorten the minimum interval - // for a system poller cycle to reduce the test time here - const queryString = [ - 'F5Telemetry_system_CL', - `where hostname_s == "${dut.hostname}"`, - 'where TimeGenerated > ago(5m)' - ].join(' | '); - return new Promise((resolve) => { setTimeout(resolve, 30000); }) - .then(() => azureUtil.queryLogs(oauthToken, WORKSPACE_ID, queryString)) - .then((results) => { - util.logger.info('Response from Log Analytics:', { hostname: dut.hostname, results }); - assert(results.tables[0], 'Log Analytics query returned no results'); - assert(results.tables[0].rows, 'Log Analytics query returned no rows'); - assert(results.tables[0].rows[0], 'Log Analytics query returned no rows'); - }); - }); + describe('Configure TS and generate data', () => { + let consumerDeclaration; - it(`should check for event listener data from:${dut.hostalias}`, () => { - const queryString = [ - 'F5Telemetry_LTM_CL', - `where hostname_s == "${dut.hostname}"`, - `where test_s == "${testDataTimestamp}"` - ].join(' | '); - return new Promise((resolve) => { setTimeout(resolve, 10000); }) - .then(() => azureUtil.queryLogs(oauthToken, WORKSPACE_ID, queryString)) - 
.then((results) => { - util.logger.info('Response from Log Analytics:', { hostname: dut.hostname, results }); - assert(results.tables[0], 'Log Analytics query returned no results'); - assert(results.tables[0].rows, 'Log Analytics query returned no rows'); - assert(results.tables[0].rows[0], 'Log Analytics query returned no rows'); - }); + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[AZURE_LA_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Azure_Log_Analytics', + workspaceId: AZURE.workspace, + passphrase: { + cipherText: AZURE.passphrase + } + }; }); + + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => `hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${AZURE_LA_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"`); }); - }); -} -function teardown() { + describe('Event Listener data', () => { + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check Azure LA for event listener data (over ${proto}) for - ${bigip.name}`, + () => { + const queryString = [ + 'F5Telemetry_LTM_CL', + `where hostname_s == "${bigip.hostname}"`, + `where testDataTimestamp_s == "${testDataTimestamp}"`, + `where testType_s == "${AZURE_LA_CONSUMER_NAME}"`, + `where protocol_s == "${proto}"` + ].join(' | '); + return azureUtil.queryLogs( + ACCESS_TOKEN, AZURE.workspace, queryString + ) + .then((results) => { + assert(results.tables[0], 'Log Analytics query returned no results'); + assert(results.tables[0].rows, 'Log Analytics query returned no rows'); + assert(results.tables[0].rows[0], 'Log Analytics query returned no rows'); + }) + .catch((err) => { + bigip.logger.error('No event listener data found. 
Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + } + ))); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should check Azure LA system poller data - ${bigip.name}`, + () => { + const queryString = [ + 'F5Telemetry_system_CL', + `where hostname_s == "${bigip.hostname}"`, + 'where TimeGenerated > ago(5m)' + ].join(' | '); + return azureUtil.queryLogs( + ACCESS_TOKEN, AZURE.workspace, queryString + ) + .then((results) => { + assert(results.tables[0], 'Log Analytics query returned no results'); + assert(results.tables[0].rows, 'Log Analytics query returned no rows'); + assert(results.tables[0].rows[0], 'Log Analytics query returned no rows'); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + } + )); + }); + }); } module.exports = { setup, - test, - teardown + test }; diff --git a/test/functional/consumersTests/defaultPullConsumer.js b/test/functional/consumersTests/defaultPullConsumer.js new file mode 100644 index 00000000..57dee575 --- /dev/null +++ b/test/functional/consumersTests/defaultPullConsumer.js @@ -0,0 +1,94 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +const constants = require('../shared/constants'); +const DEFAULT_UNNAMED_NAMESPACE = require('../../../src/lib/constants').DEFAULT_UNNAMED_NAMESPACE; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/defaultPullConsumer + */ + +// read in example configs +const BASIC_DECL = miscUtils.readJsonFile(constants.DECL.PULL_CONSUMER_BASIC); +const NAMESPACE_DECL = miscUtils.readJsonFile(constants.DECL.PULL_CONSUMER_WITH_NAMESPACE); + +/** + * Tests for DUTs + */ +function test() { + describe('Consumer Test: Default Pull Consumer', () => { + const harness = harnessUtils.getDefaultHarness(); + + const verifyResponseData = (response, hostname) => { + const body = response[0]; + const headers = response[1].headers; + + assert.lengthOf(body, 1, 'should have only one element'); + assert.deepStrictEqual(body[0].system.hostname, hostname, 'should match hostname'); + assert.ok(headers['content-type'].includes('application/json'), 'content-type should include application/json type'); + }; + + describe('Without namespace', () => { + const pullConsumerName = 'My_Pull_Consumer'; + + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(BASIC_DECL)); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data - ${bigip.name}`, + () => bigip.telemetry.getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response, bigip.hostname)) + )); + + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data using namespace endpoint - ${bigip.name}`, + () => bigip.telemetry + 
.toNamespace(DEFAULT_UNNAMED_NAMESPACE, true) + .getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response, bigip.hostname)) + )); + }); + }); + + describe('With namespace', () => { + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(NAMESPACE_DECL)); + }); + + describe('System Poller data', () => { + const namespace = 'Second_Namespace'; + const pullConsumerName = 'Pull_Consumer'; + + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data using namespace endpoint - ${bigip.name}`, + () => bigip.telemetry + .toNamespace(namespace) + .getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response, bigip.hostname)) + )); + }); + }); + }); +} + +module.exports = { + test +}; diff --git a/test/functional/consumersTests/elasticsearchTests.js b/test/functional/consumersTests/elasticsearchTests.js index 153b7f9f..1b9998bc 100644 --- a/test/functional/consumersTests/elasticsearchTests.js +++ b/test/functional/consumersTests/elasticsearchTests.js @@ -6,225 +6,304 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const util = require('../shared/util'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/elasticSearch + */ // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = util.getHosts('BIGIP'); -const CONSUMER_HOST = util.getHosts('CONSUMER')[0]; -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); - -const ES_CONTAINER_NAME = 'ts_elasticsearch_consumer'; const ES_PROTOCOL = 'http'; const ES_HTTP_PORT = 9200; const ES_TRANSPORT_PORT = 9300; const ES_CONSUMER_NAME = 'Consumer_ElasticSearch'; -const ES_IMAGE_PREFIX = 'docker.elastic.co/elasticsearch/elasticsearch'; const ES_MAX_FIELDS = 5000; const ES_VERSIONS_TO_TEST = ['6.7.2', '7.14.1', '8.0.0']; -function runRemoteCmd(cmd) { - return util.performRemoteCmd(CONSUMER_HOST.ip, CONSUMER_HOST.username, cmd, { password: CONSUMER_HOST.password }); -} +const DOCKER_CONTAINERS = { + ElasticSearch: { + detach: true, + env: { + 'discovery.type': 'single-node', + 'indices.query.bool.max_clause_count': ES_MAX_FIELDS, + 'xpack.security.enabled': 'false' + }, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}elasticsearch`, + name: 'ts_elasticsearch_consumer', + publish: { + [ES_HTTP_PORT]: ES_HTTP_PORT, + [ES_TRANSPORT_PORT]: ES_TRANSPORT_PORT + }, + restart: 'always' + } +}; -function removeESContainer() { - return runRemoteCmd(`docker ps | grep ${ES_CONTAINER_NAME}`) - .then((data) => { - if (data) { 
- return runRemoteCmd(`docker container rm -f ${ES_CONTAINER_NAME}`); - } - return Promise.resolve(); - }); +// read in example config +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; + +let SERVICE_IS_READY; + +/** + * Get version specific Docker config + * + * @param {string} version - ElasticSearch version + * + * @returns {Object} version specific Docker config + */ +function getDockerConfig(version) { + return Object.assign( + miscUtils.deepCopy(DOCKER_CONTAINERS.ElasticSearch), + { + image: `${DOCKER_CONTAINERS.ElasticSearch.image}:${version}` + } + ); } +/** + * Setup CS and DUTs + */ function setup() { - describe('Consumer Setup: Elastic Search - pull docker images', () => { - ES_VERSIONS_TO_TEST.forEach((version) => { - it(`should pull container image: ${version}`, () => runRemoteCmd(`docker pull ${ES_IMAGE_PREFIX}:${version}`)); + describe('Consumer Setup: ElasticSearch', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + cs.http.createAndSave('elastic', { + port: ES_HTTP_PORT, + protocol: ES_PROTOCOL + }); + + describe('Docker container setup', () => { + ES_VERSIONS_TO_TEST.forEach((elsVer) => { + const dockerConf = getDockerConfig(elsVer); + + it(`should pull ElasticSearch ${elsVer} docker image`, () => cs.docker.pull(dockerConf.image)); + }); }); }); } +/** + * Tests for DUTs + */ function test() { + ES_VERSIONS_TO_TEST.forEach((elsVer) => { + describe(`Consumer Test: ElasticSearch ${elsVer}`, () => { + before(() => { + SERVICE_IS_READY = false; + }); + + testVer(elsVer); + }); + }); +} + +/** + * Tests for specific ElasticSearch version + * + * @param {string} elsVer - ElasticSearch version + */ +function testVer(elsVer) { + const harness = harnessUtils.getDefaultHarness(); + const cs = harnessUtils.getDefaultHarness().other[0]; const testDataTimestamp = Date.now(); + let CONTAINER_STARTED; - describe('Consumer Test: ElasticSearch - Configure Service', 
() => { - DUTS.forEach((dut) => { - describe(`Device Under Test - ${dut.hostalias}`, () => { - ES_VERSIONS_TO_TEST.forEach((version) => { - describe(`ElasticSearch version - ${version}`, () => { - describe('Consumer service setup', () => { - it('should start container', () => { - const envArgs = `-e "discovery.type=single-node" -e "indices.query.bool.max_clause_count=${ES_MAX_FIELDS}" -e "xpack.security.enabled=false"`; - const portArgs = `-p ${ES_HTTP_PORT}:${ES_HTTP_PORT} -p ${ES_TRANSPORT_PORT}:${ES_TRANSPORT_PORT}`; - const cmd = `docker run -d --restart=always --name ${ES_CONTAINER_NAME} ${portArgs} ${envArgs} ${ES_IMAGE_PREFIX}:${version}`; - - return runRemoteCmd(`docker ps | grep ${ES_CONTAINER_NAME}`) - .then((data) => { - if (data) { - return Promise.resolve(); - } - return runRemoteCmd(cmd); - }); - }); - - it('should check service is up', () => { - const uri = '/_nodes'; - const options = { - protocol: ES_PROTOCOL, - port: ES_HTTP_PORT - }; - - return new Promise((resolve) => { setTimeout(resolve, 5000); }) - .then(() => util.makeRequest(CONSUMER_HOST.ip, uri, options)) - .then((data) => { - const nodeInfo = data._nodes; - assert.strictEqual(nodeInfo.total, 1); - assert.strictEqual(nodeInfo.successful, 1); - }); - }); - - it('should configure index limits', () => { - const uri = `/${ES_CONTAINER_NAME}`; - const options = { - protocol: ES_PROTOCOL, - port: ES_HTTP_PORT, - method: 'PUT', - headers: { - 'Content-Type': 'application/json' - }, - body: { - settings: { - 'index.mapping.total_fields.limit': ES_MAX_FIELDS - } - } - }; - return util.makeRequest(CONSUMER_HOST.ip, uri, options); - }); - }); + describe('Docker container setup', () => { + before(() => { + CONTAINER_STARTED = false; + }); - describe('Consumer Test: ElasticSearch - Configure TS', () => { - it('should configure TS', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - consumerDeclaration[ES_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'ElasticSearch', - 
host: CONSUMER_HOST.ip, - protocol: ES_PROTOCOL, - port: ES_HTTP_PORT, - index: ES_CONTAINER_NAME, - apiVersion: version - }; - return dutUtils.postDeclarationToDUT(dut, consumerDeclaration); - }); - - it('should send event to TS Event Listener', () => { - const msg = `hostname="${dut.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${ES_CONSUMER_NAME}"`; - return dutUtils.sendDataToEventListener(dut, msg); - }); - - it('should retrieve SystemPoller data', () => dutUtils.getSystemPollerData(dut, constants.DECL.SYSTEM_NAME) - .then((data) => { - const systemPollerData = data[0]; - assert.notStrictEqual(systemPollerData, undefined); - assert.notStrictEqual(systemPollerData.system, undefined); - })); - }); + it('should remove pre-existing ElasticSearch docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.ElasticSearch.name + )); - describe('Consumer Test: ElasticSearch - Test', () => { - const query = (searchString) => { - const uri = `/${ES_CONTAINER_NAME}/_search?${searchString}`; - const options = { - port: ES_HTTP_PORT, - protocol: ES_PROTOCOL - }; - util.logger.info(`ElasticSearch search query - ${uri}`); - return util.makeRequestWithRetry( - () => util.makeRequest(CONSUMER_HOST.ip, uri, options), - 30000, - 5 - ); - }; - - it('should check for event listener data for', () => new Promise((resolve) => { setTimeout(resolve, 10000); }) - .then(() => query(`size=1&q=data.testType:${ES_CONSUMER_NAME}%20AND%20data.hostname=${dut.hostname}`)) - .then((data) => { - util.logger.info('ElasticSearch response:', data); - const esData = data.hits.hits; - assert.notStrictEqual(esData.length, 0, 'ElasticSearch should return search results'); - - let found = false; - esData.forEach((hit) => { - const eventData = hit._source.data; - if (eventData && eventData.hostname === dut.hostname) { - assert.strictEqual( - eventData.testDataTimestamp, - testDataTimestamp.toString() - ); - found = true; - } - }); - if 
(!found) { - return Promise.reject(new Error('Event not found')); - } - return Promise.resolve(); - })); - - it('should have system poller data', () => new Promise((resolve) => { setTimeout(resolve, 10000); }) - .then(() => query(`size=1&q=system.hostname:${dut.hostname}`)) - .then((data) => { - util.logger.info('ElasticSearch response:', data); - const esData = data.hits.hits; - assert.notStrictEqual(esData.length, 0, 'ElasticSearch should return search results'); - - let found = false; - esData.forEach((hit) => { - const sysData = hit._source; - if (sysData && sysData.system && sysData.system.hostname === dut.hostname) { - const schema = JSON.parse( - fs.readFileSync(constants.DECL.SYSTEM_POLLER_SCHEMA) - ); - const valid = util.validateAgainstSchema(sysData, schema); - if (valid !== true) { - assert.fail(`output is not valid: ${JSON.stringify(valid.errors)}`); - } - found = true; - } - }); - if (!found) { - return Promise.reject(new Error('System Poller data not found')); - } - return Promise.resolve(); - })); - }); + it('should start new ElasticSearch docker container', () => harnessUtils.docker.startNewContainer( + cs.docker, + getDockerConfig(elsVer) + ) + .then(() => { + CONTAINER_STARTED = true; + })); - describe('TS cleanup - remove ElasticSearch consumer', () => { - it('should remove consumer from TS declaration', () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(DECLARATION))); - }); + it('should check service is up', () => cs.http.elastic.makeRequest({ + uri: '/_nodes' + }) + .then((data) => { + const nodeInfo = data._nodes; + assert.deepStrictEqual(nodeInfo.total, 1); + assert.deepStrictEqual(nodeInfo.successful, 1); + }) + .catch((err) => { + cs.logger.error('Caught error on attempt to check service state. 
Re-trying in 3sec', err); + return promiseUtils.sleepAndReject(3000, err); + })); + }); + + describe('Configure service', () => { + before(() => { + assert.isOk(CONTAINER_STARTED, 'should start ElasticSearch container!'); + }); + + harness.bigip.forEach((bigip) => it( + `should configure index limits - ${bigip.name}`, + () => cs.http.elastic.makeRequest({ + body: { + settings: { + 'index.mapping.total_fields.limit': ES_MAX_FIELDS + } + }, + headers: { + 'Content-Type': 'application/json' + }, + json: true, + method: 'PUT', + uri: `/${bigip.name}` + }) + .then(() => { + SERVICE_IS_READY = true; + }) + .catch((err) => { + cs.logger.error('Caught error on attempt to configure service. Re-trying in 1sec', err); + return promiseUtils.sleepAndReject(1000, err); + }) + )); + }); + + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + assert.isOk(SERVICE_IS_READY, 'should start ElasticSearch service!'); - describe('Consumer service teardown', () => { - it('should remove container', () => removeESContainer()); + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[ES_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'ElasticSearch', + host: cs.host.host, + protocol: ES_PROTOCOL, + port: ES_HTTP_PORT, + apiVersion: elsVer + }; + }); + + testUtils.shouldConfigureTS(harness.bigip, (bigip) => { + const decl = miscUtils.deepCopy(consumerDeclaration); + decl[ES_CONSUMER_NAME].index = bigip.name; + return decl; + }); + + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => `hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${ES_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"`); + }); + + /** + * Send query to ElasticSearch + * + * @param {string} searchString - search string + * + * @returns {Promise} resolved with data + */ + const esQuery = (index, searchString) => cs.http.elastic.makeRequest({ + retry: { + maxTries: 5, + delay: 
30000 + }, + uri: `/${index}/_search?${searchString}` + }); + + describe('Event Listener data', () => { + const timestampStr = testDataTimestamp.toString(); + + before(() => { + assert.isOk(SERVICE_IS_READY, 'should start ElasticSearch service!'); + }); + + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check ElasticSearch for event listener data (over ${proto}) for - ${bigip.name}`, + () => esQuery(bigip.name, `size=1&q=data.protocol:${proto}%20AND%20data.testType:${ES_CONSUMER_NAME}%20AND%20data.hostname=${bigip.hostname}`) + .then((data) => { + const esData = data.hits.hits; + assert.isNotEmpty(esData, 'ElasticSearch should return search results'); + + const found = esData.some((hit) => { + const eventData = hit._source.data; + return eventData + && eventData.testDataTimestamp === timestampStr + && eventData.hostname === bigip.hostname + && eventData.protocol === proto; }); - }); - }); - }); + if (found) { + return Promise.resolve(); + } + return Promise.reject(new Error('should have events indexed from event listener data')); + }) + .catch((err) => { + bigip.logger.error('No event listener data found. 
Going to wait another 10sec', err); + return promiseUtils.sleepAndReject(10000, err); + }) + ))); + }); + + describe('System Poller data', () => { + before(() => { + assert.isOk(SERVICE_IS_READY, 'should start ElasticSearch service!'); }); + + harness.bigip.forEach((bigip) => it( + `should check ElasticSearch for system poller data - ${bigip.name}`, + () => esQuery(bigip.name, `size=1&q=system.hostname:${bigip.hostname}`) + .then((data) => { + const esData = data.hits.hits; + assert.isNotEmpty(esData, 'ElasticSearch should return search results'); + + const found = esData.some((hit) => { + const eventData = hit._source; + if (eventData && eventData.system + && eventData.system.hostname === bigip.hostname) { + const schema = miscUtils.readJsonFile(constants.DECL.SYSTEM_POLLER_SCHEMA); + return miscUtils.validateAgainstSchema(eventData, schema); + } + return false; + }); + if (found) { + return Promise.resolve(); + } + return Promise.reject(new Error('should have data indexed from system poller data')); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. 
Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }) + )); }); } +/** + * Teardown CS + */ function teardown() { - describe('Consumer Test: ElasticSearch - teardown', () => { - it('should remove container', () => removeESContainer()); + describe('Consumer Teardown: ElasticSearch', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + + it('should stop and remove ElasticSearch docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.ElasticSearch.name + )); }); } diff --git a/test/functional/consumersTests/f5CloudTests.js b/test/functional/consumersTests/f5CloudTests.js index 151b6434..d922ab9c 100644 --- a/test/functional/consumersTests/f5CloudTests.js +++ b/test/functional/consumersTests/f5CloudTests.js @@ -8,38 +8,60 @@ 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const scp = require('node-scp'); +/** + * ATTENTION: F5 Cloud tests disabled until F5_Cloud interactions resolved + */ + +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; -const sharedUtil = require('../shared/util'); -const util = require('../../../src/lib/utils/misc'); -const requestsUtil = require('../../../src/lib/utils/requests'); -const testUtil = require('../../unit/shared/util'); +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('f5cloudTests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const srcMiscUtils = require('../../../src/lib/utils/misc'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/f5cloud + */ const MODULE_REQUIREMENTS = { DOCKER: true }; -const CONSUMER_HOST = 
sharedUtil.getHosts('CONSUMER')[0]; // only expect one -const DUTS = sharedUtil.getHosts('BIGIP'); -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); -const F5_CLOUD_NAME = 'GRPC_F5_CLOUD'; -const MOCK_SERVER_NAME = 'grpc_mock_server'; -const PROTO_PATH = `${__dirname}/../../../src/lib/consumers/F5_Cloud/deos.proto`; -const GRPC_MOCK_SERVER_DOCKER = `${process.env[constants.ENV_VARS.ARTIFACTORY_SERVER]}/f5-magneto-docker/grpc-mock-server:1.0.1`; + +const F5_CLOUD_CONSUMER_NAME = 'GRPC_F5_CLOUD'; const GRPC_MOCK_SENDING_PORT = 4770; const GRPC_MOCK_ADMIN_PORT = 4771; -const SHOULD_RUN_TESTS = {}; +const PROTO_PATH = `${__dirname}/../../../src/lib/consumers/F5_Cloud/deos.proto`; +const REMOTE_PROTO_PATH = '/home/deos.proto'; + +const DOCKER_CONTAINERS = { + F5CloudGRPC: { + command: '/proto/deos.proto', + detach: true, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}f5-magneto-docker/grpc-mock-server:1.0.1`, + name: 'grpc-mock-server', + publish: { + [GRPC_MOCK_ADMIN_PORT]: GRPC_MOCK_ADMIN_PORT, + [GRPC_MOCK_SENDING_PORT]: GRPC_MOCK_SENDING_PORT + }, + restart: 'always', + volume: { + '/home': '/proto' + } + } +}; -let VALID_SERVICE_ACCOUNT = {}; +// read in example config +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; -function runRemoteCmd(cmd) { - return sharedUtil.performRemoteCmd( - CONSUMER_HOST.ip, - CONSUMER_HOST.username, - cmd, - { password: CONSUMER_HOST.password } - ); -} +let CONTAINER_STARTED; +let SHOULD_SKIP_DUE_VERSION; +let SERVICE_ACCOUNT = null; /* --- Notes about viktorfefilovf5/magneto-grpc-mock-server:0.0.7 --- @@ -54,65 +76,60 @@ function runRemoteCmd(cmd) { GET /clearInteractions - clear interactions */ +/** + * Setup CS and DUTs + */ function setup() { - const serviceAccount = process.env[constants.ENV_VARS.F5_CLOUD.SERVICE_ACCOUNT]; - assert.ok(serviceAccount, `should define env variable 
${constants.ENV_VARS.F5_CLOUD.SERVICE_ACCOUNT} with real service account`); - const parsedServiceAccount = JSON.parse(fs.readFileSync(serviceAccount)); - parsedServiceAccount.privateKey = { - cipherText: parsedServiceAccount.privateKey - }; - VALID_SERVICE_ACCOUNT = parsedServiceAccount; - assert.ok(VALID_SERVICE_ACCOUNT.type, 'service account is not valid'); - - describe('Consumer Setup Check: check bigip requirements', () => { - DUTS.forEach((dut) => it( - `get bigip version and check if version is good for F5 Cloud - ${dut.hostalias}`, - () => sharedUtil.getBigipVersion(dut) - .then((response) => { - // F5 Cloud should support bigip 14 and above - SHOULD_RUN_TESTS[dut.hostalias] = util.compareVersionStrings(response, '>=', '14.0.0'); - }) - )); - }); + describe.skip('Consumer Setup: F5 Cloud', () => { + const harness = harnessUtils.getDefaultHarness(); + const cs = harnessUtils.getDefaultHarness().other[0]; - // .skip() until F5_Cloud interactions resolved - describe.skip('Consumer Setup: configuration', () => { - it('should pull grpc-mock-server docker image', () => runRemoteCmd(`docker pull ${GRPC_MOCK_SERVER_DOCKER}`)); + cs.http.createAndSave('f5cloud', { + port: GRPC_MOCK_ADMIN_PORT, + protocol: 'http', + retry: { + maxTries: 10, + delay: 1000 + } + }); - it('should delete proto file if exist', () => runRemoteCmd('rm -f ~/deos.proto')); + before(() => { + CONTAINER_STARTED = false; + SERVICE_ACCOUNT = null; + SHOULD_SKIP_DUE_VERSION = {}; - it('should copy proto file using node-scp', () => scp({ - host: CONSUMER_HOST.ip, - port: 22, - username: CONSUMER_HOST.username, - password: CONSUMER_HOST.password - }).then((client) => { - client.uploadFile(PROTO_PATH, '/home/ubuntu/deos.proto') - .then(() => { - client.close(); - assert(true); - }) - .catch((error) => { - assert(false, `Test Error: Couldnt upload proto files to ${CONSUMER_HOST.ip} using scp, error: ${error}`); + const envVar = miscUtils.getEnvArg(constants.ENV_VARS.F5_CLOUD.SERVICE_ACCOUNT); + 
logger.info('Reading service account info from file', { + envVar: constants.ENV_VARS.F5_CLOUD.SERVICE_ACCOUNT, + envVal: envVar + }); + return miscUtils.readJsonFile(envVar, true) + .then((serviceAccount) => { + assert.isDefined(serviceAccount.type, 'service account is not valid'); + SERVICE_ACCOUNT = serviceAccount; + SERVICE_ACCOUNT.privateKey = { + cipherText: SERVICE_ACCOUNT.privateKey + }; }); - }).catch((error) => { - assert(false, `Test Error: Couldnt create scp client, error: ${error}`); - })); + }); - it('should set up mock GRPC server', () => runRemoteCmd(`docker ps | grep ${MOCK_SERVER_NAME}`).then((data) => { - if (data) { - return Promise.resolve(); // exists, continue - } - return runRemoteCmd(`docker run -d -p ${GRPC_MOCK_SENDING_PORT}:${GRPC_MOCK_SENDING_PORT} -p ${GRPC_MOCK_ADMIN_PORT}:${GRPC_MOCK_ADMIN_PORT} -v /home/ubuntu:/proto --name ${MOCK_SERVER_NAME} ${GRPC_MOCK_SERVER_DOCKER} /proto/deos.proto`); - })); + // .skip() until F5_Cloud interactions resolved + describe('Docker container setup', () => { + it('should pull F5 Cloud GRPC docker image', () => cs.docker.pull(DOCKER_CONTAINERS.F5CloudGRPC.image)); - it('should add stub to mock server', () => { - const options = { - method: 'POST', - fullURI: `http://${CONSUMER_HOST.ip}:${GRPC_MOCK_ADMIN_PORT}/add`, - headers: { - 'Content-Type': 'application/json' - }, + it('should delete proto file if exist', () => cs.ssh.default.unlinkIfExists(REMOTE_PROTO_PATH)); + + it('should copy proto file', () => cs.ssh.default.copyFileToRemote(PROTO_PATH, REMOTE_PROTO_PATH)); + + it('should start new F5 Cloud GRPC docker container', () => harnessUtils.docker.startNewContainer( + cs.docker, + DOCKER_CONTAINERS.F5CloudGRPC + ) + .then(() => { + CONTAINER_STARTED = true; + })); + + it('should add stub to mock server', () => cs.http.f5cloud.makeRequest({ body: { service: 'Ingestion', method: 'Post', @@ -124,86 +141,143 @@ function setup() { output: { data: {} } - } - }; - return requestsUtil.makeRequest(options); 
+ }, + headers: { + 'Content-Type': 'application/json' + }, + json: true, + method: 'POST', + uri: '/add' + })); + }); + + describe('Gather information about DUTs version', () => { + harness.bigip.forEach((bigip) => it( + `should get bigip version and check if version is high enough for F5 Cloud - ${bigip.name}`, + () => bigip.icAPI.default.getSoftwareVersion() + .then((version) => { + // OpenTelemetry Exporter consumer is supported on bigip 14.1 and above + SHOULD_SKIP_DUE_VERSION[bigip.hostname] = srcMiscUtils.compareVersionStrings(version, '<', '14.0'); + + logger.info('DUT\' version', { + hostname: bigip.hostname, + shouldSkipTests: SHOULD_SKIP_DUE_VERSION[bigip.hostname], + version + }); + }) + )); }); }); } +/** + * Tests for DUTs + */ function test() { - const testDataTimestamp = Date.now(); - const msg = (hostName) => `hostname="${hostName}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${F5_CLOUD_NAME}"`; - - // .skip() until F5_Cloud interactions resolved - describe.skip('Consumer Test: F5 Cloud - Configure TS', () => { - DUTS.forEach((dut) => it(`should configure TS - ${dut.hostalias}`, function () { - if (!SHOULD_RUN_TESTS[dut.hostalias]) { - this.skip(); - } - const consumerDeclaration = sharedUtil.deepCopy(DECLARATION); - consumerDeclaration[F5_CLOUD_NAME] = { - allowSelfSignedCert: true, - class: 'Telemetry_Consumer', - type: 'F5_Cloud', - enable: true, - trace: true, - f5csTenantId: 'a-blabla-a', - f5csSensorId: '12345', - payloadSchemaNid: 'f5', - serviceAccount: testUtil.deepCopy(VALID_SERVICE_ACCOUNT), - targetAudience: CONSUMER_HOST.ip, - useSSL: false, - port: GRPC_MOCK_SENDING_PORT - }; - return dutUtils.postDeclarationToDUT(dut, sharedUtil.deepCopy(consumerDeclaration)); - })); - DUTS.forEach((dut) => it(`should send event to TS Event Listener - ${dut.hostalias}`, function () { - if (!SHOULD_RUN_TESTS[dut.hostalias]) { - this.skip(); - } - return dutUtils.sendDataToEventListener(dut, `${msg(dut.hostname)}`); - })); - }); + 
describe.skip('Consumer Test: F5 Cloud', () => { + const harness = harnessUtils.getDefaultHarness(); + const cs = harness.other[0]; + const testDataTimestamp = Date.now(); - // .skip() until F5_Cloud interactions resolved - describe.skip('Consumer Test: F5 Cloud - Test', () => { - DUTS.forEach((dut) => it(`should find the right interactions on mock server - ${dut.hostalias}`, function () { - if (!SHOULD_RUN_TESTS[dut.hostalias]) { - this.skip(); - } - const options = { - method: 'GET', - fullURI: `http://${CONSUMER_HOST.ip}:${GRPC_MOCK_ADMIN_PORT}/interactions`, - headers: {} - }; - const responseDataJSONList = []; - return requestsUtil.makeRequest(options).then((responseList) => { - if (responseList && responseList.length > 0) { - responseList.forEach((response) => { - assert(response.service === 'Ingestion', `Test Error: Incorrect service name, should be 'Ingestion', got '${response.service}'`); - assert(response.method === 'Post', `Test Error: Incorrect method name, should be 'Post', got '${response.method}'`); - assert(response.data.account_id === 'urn:f5_cs::account:a-blabla-a', `Test Error: Incorrect method name, should be 'urn:f5_cs::account:a-blabla-a', got '${response.data.account_id}'`); - const stringData = Buffer.from(response.data.payload, 'base64').toString(); // decode base64 - const jsonData = JSON.parse(stringData); - if (jsonData.testType === F5_CLOUD_NAME) { - responseDataJSONList.push(jsonData); - } - }); - assert(responseDataJSONList.some((responseDataJSON) => responseDataJSON.hostname === dut.hostname), `Test Error: ${dut.hostname} does not exist`); - assert(responseDataJSONList.every((responseDataJSON) => responseDataJSON.testDataTimestamp === testDataTimestamp.toString()), `Test Error: testDataTimestamp should be ${testDataTimestamp}`); - } else { - assert(false, 'no response from mock server'); - } + /** + * @returns {boolean} true if DUt satisfies version restriction + */ + const isValidDut = (dut) => 
!SHOULD_SKIP_DUE_VERSION[dut.hostname]; + + before(() => { + assert.isOk(CONTAINER_STARTED, 'should start F5 Cloud GRPC container!'); + assert.isNotNull(SERVICE_ACCOUNT, 'should fetch F5 Cloud API metadata from process.env'); + }); + + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[F5_CLOUD_CONSUMER_NAME] = { + allowSelfSignedCert: true, + class: 'Telemetry_Consumer', + type: 'F5_Cloud', + enable: true, + trace: true, + f5csTenantId: 'a-blabla-a', + f5csSensorId: '12345', + payloadSchemaNid: 'f5', + serviceAccount: miscUtils.deepCopy(SERVICE_ACCOUNT), + targetAudience: cs.host.host, + useSSL: false, + port: GRPC_MOCK_SENDING_PORT + }; }); - })); + + testUtils.shouldConfigureTS(harness.bigip, (bigip) => (isValidDut(bigip) + ? miscUtils.deepCopy(consumerDeclaration) + : null)); + + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => (isValidDut(bigip) + ? 
`functionalTestMetric="147",EOCTimestamp="1231232",hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${F5_CLOUD_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"` + : null)); + }); + + describe('Event Listener data', () => { + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check F5 Cloud GRCP server for event listener data (over ${proto}) for - ${bigip.name}`, + function () { + if (!isValidDut(bigip)) { + return this.skip(); + } + return cs.http.otel.makeRequest({ + headers: {}, + method: 'GET', + uri: '/interactions' + }) + .then((data) => { + assert.isArray(data, 'should be array'); + assert.isNotEmpty(data, 'should not be empty'); + + const responseDataJSONList = []; + data.forEach((response) => { + assert(response.service === 'Ingestion', `Test Error: Incorrect service name, should be 'Ingestion', got '${response.service}'`); + assert(response.method === 'Post', `Test Error: Incorrect method name, should be 'Post', got '${response.method}'`); + assert(response.data.account_id === 'urn:f5_cs::account:a-blabla-a', `Test Error: Incorrect method name, should be 'urn:f5_cs::account:a-blabla-a', got '${response.data.account_id}'`); + const stringData = Buffer.from(response.data.payload, 'base64').toString(); // decode base64 + const jsonData = JSON.parse(stringData); + if (jsonData.testType === F5_CLOUD_CONSUMER_NAME) { + responseDataJSONList.push(jsonData); + } + }); + assert.isOk( + responseDataJSONList.some((responseDataJSON) => ( + responseDataJSON.hostname === bigip.hostname + && responseDataJSON.testDataTimestamp === testDataTimestamp.toString() + && responseDataJSON.protocol === proto)), + `Test Error: no valid event listener data for ${bigip.hostname}` + ); + }) + .catch((err) => { + bigip.logger.info('No event listener data found. 
Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }); + } + ))); + }); }); } +/** + * Teardown CS + */ function teardown() { - // .skip() until F5_Cloud interactions resolved - describe.skip('Consumer Test: teardown mock server', () => { - it(`should remove ${MOCK_SERVER_NAME} container`, () => runRemoteCmd(`docker container rm -f ${MOCK_SERVER_NAME}`)); + describe.skip('Consumer Teardown: F5 Cloud', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + + it('should stop and remove F5 Cloud GRPC docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.F5CloudGRPC.name + )); + + it('should remove GRPC proto file', () => cs.ssh.default.unlinkIfExists(REMOTE_PROTO_PATH)); }); } diff --git a/test/functional/consumersTests/fluentdTests.js b/test/functional/consumersTests/fluentdTests.js index cf47a3f5..763f3daa 100644 --- a/test/functional/consumersTests/fluentdTests.js +++ b/test/functional/consumersTests/fluentdTests.js @@ -6,25 +6,28 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const util = require('../shared/util'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); +const pathUtil = require('path'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/fluentd + */ // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = util.getHosts('BIGIP'); - -const CONSUMER_HOST = util.getHosts('CONSUMER')[0]; // only expect one -const FLUENTD_IMAGE_NAME = `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}fluent/fluentd:v1.6-debian-1`; -const FLUENTD_NAME = 'fluentd-server'; -const FLUENTD_HOST = CONSUMER_HOST.ip; const FLUENTD_FOLDER = 'fluentd'; const FLUENTD_CONF_FILE = 'fluentd.conf'; const FLUENTD_PROTOCOL = 'http'; @@ -41,136 +44,191 @@ const FLUENTD_CONF = ` @type stdout `; +const DOCKER_CONTAINERS = { + FluentD: { + detach: true, + env: { + FLUENTD_CONF: FLUENTD_CONF_FILE + }, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}fluent/fluentd:v1.6-debian-1`, + name: 'fluentd-server', + publish: { + [FLUENTD_PORT]: FLUENTD_PORT + }, + restart: 'always', + volume: { + [`$(pwd)/${FLUENTD_FOLDER}`]: '/fluentd/etc' + } + } +}; + // read in example config -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; -function runRemoteCmd(cmd) { - return util.performRemoteCmd(CONSUMER_HOST.ip, CONSUMER_HOST.username, cmd, { password: CONSUMER_HOST.password 
}); -} +let CONTAINER_STARTED; +/** + * Setup CS and DUTs + */ function setup() { describe('Consumer Setup: Fluentd', () => { - it('should pull Fluentd docker image', () => runRemoteCmd(`docker pull ${FLUENTD_IMAGE_NAME}`)); + const cs = harnessUtils.getDefaultHarness().other[0]; - it('should write Fluentd configuration', () => runRemoteCmd(`mkdir ${FLUENTD_FOLDER} && echo "${FLUENTD_CONF}" > ${FLUENTD_FOLDER}/${FLUENTD_CONF_FILE}`)); - - it('should start Fluentd docker containers', () => { - const fluentdParams = `-p ${FLUENTD_PORT}:${FLUENTD_PORT} -v $(pwd)/${FLUENTD_FOLDER}:/fluentd/etc -e FLUENTD_CONF=${FLUENTD_CONF_FILE}`; - const cmdFluentd = `docker run -d ${fluentdParams} --name ${FLUENTD_NAME} ${FLUENTD_IMAGE_NAME}`; + describe('Docker container setup', () => { + before(() => { + CONTAINER_STARTED = false; + }); - // simple check to see if fluentd container already exists - return runRemoteCmd(`docker ps | grep ${FLUENTD_NAME}`) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue - } - return runRemoteCmd(cmdFluentd); - }); + it('should pull Fluentd docker image', () => cs.docker.pull(DOCKER_CONTAINERS.FluentD.image)); + + it('should remove pre-existing FluentD docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.FluentD.name + )); + + it('should write Fluentd configuration', () => cs.ssh.default.mkdirIfNotExists(FLUENTD_FOLDER) + .then(() => cs.ssh.default.writeToFile( + pathUtil.join(FLUENTD_FOLDER, FLUENTD_CONF_FILE), + FLUENTD_CONF + ))); + + it('should start new Fluentd docker container', () => harnessUtils.docker.startNewContainer( + cs.docker, + DOCKER_CONTAINERS.FluentD + ) + .then(() => { + CONTAINER_STARTED = true; + })); }); }); } +/** + * Tests for DUTs + */ function test() { - const testDataTimestamp = Date.now(); - - describe('Consumer Test: Fluentd - Configure TS and generate data', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - 
consumerDeclaration[FLUENTD_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Generic_HTTP', - host: FLUENTD_HOST, - protocol: FLUENTD_PROTOCOL, - port: FLUENTD_PORT, - path: `/${FLUENTD_TAG_NAME}`, - headers: [ - { - name: 'Content-Type', - value: 'application/json' - } - ] - }; - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(consumerDeclaration)) - )); + describe('Consumer Test: Fluentd', () => { + const harness = harnessUtils.getDefaultHarness(); + const cs = harness.other[0]; + const testDataTimestamp = Date.now(); - it('should send event to TS Event Listener', () => { - const msg = `testDataTimestamp="${testDataTimestamp}",test="true",testType="${FLUENTD_CONSUMER_NAME}"`; - return dutUtils.sendDataToEventListeners((dut) => `hostname="${dut.hostname}",${msg}`); + before(() => { + assert.isOk(CONTAINER_STARTED, 'should start FluentD container!'); }); - }); - describe('Consumer Test: Fluentd - Tests', () => { - const systemPollerData = {}; - const fluentLogs = []; - - before(() => new Promise((resolve) => { setTimeout(resolve, 30 * 1000); }) - .then(() => dutUtils.getSystemPollersData((hostObj, data) => { - systemPollerData[hostObj.hostname] = data[0]; - }))); - - it('should get log data from Fluentd stdout', () => runRemoteCmd(`docker logs ${FLUENTD_NAME}`) - .then((data) => { - util.logger.info('Fluentd docker logs:', data); - // push all relevant logs lines into log array for later tests - const fluentdLogRegex = /\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}.\d{1,10}\s\+\d{1,5} /; - const logLines = data.split(fluentdLogRegex); - logLines.forEach((line) => { - if (line.startsWith(FLUENTD_TAG_NAME)) { - fluentLogs.push(JSON.parse(line.split(`${FLUENTD_TAG_NAME}:`).pop().trim())); - } - }); - if (!fluentLogs.length > 0) { - assert.fail(`Did not find any log lines with the configured Fluentd tag name: ${FLUENTD_TAG_NAME}`); - } - })); - - DUTS.forEach((dut) => { - it(`should have 
system poller config for - ${dut.hostalias}`, () => { - const hostname = systemPollerData[dut.hostname]; - assert.notStrictEqual(hostname, undefined); + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[FLUENTD_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Generic_HTTP', + host: cs.host.host, + protocol: FLUENTD_PROTOCOL, + port: FLUENTD_PORT, + path: `/${FLUENTD_TAG_NAME}`, + headers: [ + { + name: 'Content-Type', + value: 'application/json' + } + ] + }; }); - it(`should check fluentd for event listener data for - ${dut.hostalias}`, () => { - let found = false; - fluentLogs.forEach((logEntry) => { - if (logEntry.test === 'true' && logEntry.hostname === dut.hostname) { - assert.strictEqual(logEntry.testDataTimestamp, testDataTimestamp.toString()); - assert.strictEqual(logEntry.testType, FLUENTD_CONSUMER_NAME); - found = true; - } - }); - if (!found) { - return Promise.reject(new Error('Fluentd log should include event data')); - } - return Promise.resolve(); - }); + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => `hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${FLUENTD_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"`); + }); - it(`should check fluentd for system poller data for - ${dut.hostalias}`, () => { - let found = false; - fluentLogs.forEach((logEntry) => { - if (logEntry.system && logEntry.system.hostname === dut.hostname) { - const schema = JSON.parse(fs.readFileSync(constants.DECL.SYSTEM_POLLER_SCHEMA)); - const valid = util.validateAgainstSchema(logEntry, schema); - if (valid !== true) { - assert.fail(`output is not valid: ${JSON.stringify(valid.errors)}`); + /** + * Fetch logs from FluentD container + * + * @param {function} filter - function to 
filer logs + * + * @returns {Promise>} resolved with parsed logs + */ + function getContainerLogs(filter) { + return cs.docker.containerLogs(DOCKER_CONTAINERS.FluentD.name) + .then((data) => { + cs.logger.info('Fluentd docker logs:', data); + data = data.stdout; + // push all relevant logs lines into log array for later tests + const fluentdLogRegex = /\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}.\d{1,10}\s\+\d{1,5} /; + const logLines = data.split(fluentdLogRegex); + const validLogs = []; + logLines.forEach((line) => { + if (line.startsWith(FLUENTD_TAG_NAME)) { + const parsed = JSON.parse(line.split(`${FLUENTD_TAG_NAME}:`).pop().trim()); + if (!filter || filter(parsed, line)) { + validLogs.push(parsed); + } } - found = true; - } + }); + return validLogs; }); - if (!found) { - return Promise.reject(new Error('Fluentd log should include event data')); - } - return Promise.resolve(); - }); + } + + describe('Event Listener data', () => { + const timestampStr = testDataTimestamp.toString(); + + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check FluentD for event listener data (over ${proto}) for - ${bigip.name}`, + () => getContainerLogs((log) => log.test === 'true' + && log.hostname === bigip.hostname + && log.protocol === proto + && log.testDataTimestamp === timestampStr + && log.testType === FLUENTD_CONSUMER_NAME) + .then((logs) => { + if (logs.length === 0) { + bigip.logger.info('No event listener data found. 
Going to wait another 10sec'); + return promiseUtils.sleepAndReject(10000, `should have event(s) for a data from event listener (over ${proto})`); + } + return Promise.resolve(); + }) + ))); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should check FluentD for system poller data - ${bigip.name}`, + () => getContainerLogs((log) => { + if (typeof log.system === 'object' && log.system.hostname === bigip.hostname) { + const schema = miscUtils.readJsonFile(constants.DECL.SYSTEM_POLLER_SCHEMA); + return miscUtils.validateAgainstSchema(log, schema); + } + return false; + }) + .then((logs) => { + if (logs.length === 0) { + bigip.logger.info('No system poller data found. Going to wait another 10sec'); + // more sleep time for system poller + return promiseUtils.sleepAndReject(20000, 'should have event(s) for a data from system poller'); + } + return Promise.resolve(); + }) + )); }); }); } +/** + * Teardown CS + */ function teardown() { describe('Consumer Teardown: Fluentd', () => { - it('should remove container', () => runRemoteCmd(`docker container rm -f ${FLUENTD_NAME}`)); - it('should remove Fluentd configuration file', () => runRemoteCmd(`rm -rf ${FLUENTD_FOLDER}`)); + const cs = harnessUtils.getDefaultHarness().other[0]; + + it('should stop and remove FluentD docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.FluentD.name + )); + + it('should remove Fluentd configuration file', () => cs.ssh.default.unlinkIfExists(pathUtil.join(FLUENTD_FOLDER, FLUENTD_CONF_FILE))); + + it('should remove Fluentd directory', () => cs.ssh.default.rmdirIfExists(FLUENTD_FOLDER)); }); } diff --git a/test/functional/consumersTests/googleCloudMonitoringTests.js b/test/functional/consumersTests/googleCloudMonitoringTests.js index d7781001..5dce257d 100644 --- a/test/functional/consumersTests/googleCloudMonitoringTests.js +++ b/test/functional/consumersTests/googleCloudMonitoringTests.js @@ -8,127 
+8,116 @@ 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const jwt = require('jsonwebtoken'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; -const sharedUtil = require('../shared/util'); -const requestsUtil = require('../../../src/lib/utils/requests'); +const gcpUtil = require('../shared/cloudUtils/gcp'); +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; -const DUTS = sharedUtil.getHosts('BIGIP'); +/** + * @module test/functional/consumersTests/googleCloudMonitoring + */ -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); -const PROJECT_ID = process.env[constants.ENV_VARS.GCP.PROJECT_ID]; -const PRIVATE_KEY_ID = process.env[constants.ENV_VARS.GCP.PRIVATE_KEY_ID]; -const PRIVATE_KEY = process.env[constants.ENV_VARS.GCP.PRIVATE_KEY].replace(/REPLACE/g, '\n'); -const SERVICE_EMAIL = process.env[constants.ENV_VARS.GCP.SERVICE_EMAIL]; const GOOGLE_SD_CONSUMER_NAME = 'Google_SD_Consumer'; -let accessToken; +// read in example config +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +let ACCESS_TOKEN = null; +let GCP = null; +/** + * Setup CS and DUTs + */ function setup() { - describe('Consumer Setup: Google Cloud Monitoring - access token', () => { - it('should get access token', () => { - const newJwt = jwt.sign( - { - iss: SERVICE_EMAIL, - scope: 'https://www.googleapis.com/auth/monitoring', - aud: 'https://oauth2.googleapis.com/token', - exp: Math.floor(Date.now() / 1000) + 3600, - iat: Math.floor(Date.now() / 1000) - }, - PRIVATE_KEY, - { - algorithm: 'RS256', - header: { - kid: PRIVATE_KEY_ID, - typ: 'JWT', - alg: 'RS256' - } - } - ); - const 
options = { - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded' - }, - form: { - grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', - assertion: newJwt - }, - fullURI: 'https://oauth2.googleapis.com/token' - }; - return requestsUtil.makeRequest(options) - .then((result) => { - accessToken = result.access_token; - }) - .catch((err) => { - sharedUtil.logger.error(`Unable to get access token: ${err}`); - return Promise.reject(err); + describe('Consumer Setup: Google Cloud Monitoring', () => { + before(() => { + ACCESS_TOKEN = null; + return gcpUtil.getMetadataFromProcessEnv() + .then((gcpData) => { + GCP = gcpData; }); }); + + it('should get access token', () => gcpUtil.getOAuthToken( + GCP.serviceEmail, + GCP.privateKey, + GCP.privateKeyID + ) + .then((accessToken) => { + ACCESS_TOKEN = accessToken; + })); }); } - +/** + * Tests for DUTs + */ function test() { - describe('Consumer Test: Google Cloud Monitoring - Configure TS', () => { - const consumerDeclaration = sharedUtil.deepCopy(DECLARATION); - consumerDeclaration[GOOGLE_SD_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Google_Cloud_Monitoring', - privateKey: { - cipherText: PRIVATE_KEY - }, - projectId: PROJECT_ID, - serviceEmail: SERVICE_EMAIL, - privateKeyId: PRIVATE_KEY_ID - }; - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, sharedUtil.deepCopy(consumerDeclaration)) - )); - }); + describe('Consumer Test: Google Cloud Monitoring', () => { + const harness = harnessUtils.getDefaultHarness(); - describe('Consumer Test: Google Cloud Monitoring - Test', () => { - const queryGoogle = (queryString) => { - const options = { - fullURI: `https://monitoring.googleapis.com/v3/projects/${PROJECT_ID}/timeSeries?${queryString}`, - headers: { - Authorization: `Bearer ${accessToken}` - } - }; - return requestsUtil.makeRequest(options); - }; - - DUTS.forEach((dut) => { - it(`should check for system poller 
data from:${dut.hostalias}`, () => { - let timeStart = new Date(); - let timeEnd = new Date(); - timeStart.setMinutes(timeEnd.getMinutes() - 5); - timeStart = timeStart.toJSON(); - timeEnd = timeEnd.toJSON(); - const queryString = [ - `interval.startTime=${timeStart}`, - `interval.endTime=${timeEnd}`, - `filter=metric.type="custom.googleapis.com/system/tmmCpu" AND resource.labels.namespace="${dut.hostname}"` - ].join('&'); - return new Promise((resolve) => { setTimeout(resolve, 30000); }) - .then(() => queryGoogle(queryString)) - .then((timeSeries) => { - sharedUtil.logger.info('Response from Google Cloud Monitoring:', { hostname: dut.hostname, timeSeries }); - assert.notEqual(timeSeries.timeSeries[0].points[0], undefined); - assert.equal(dut.hostname, timeSeries.timeSeries[0].resource.labels.namespace); - }); + before(() => { + assert.isNotNull(ACCESS_TOKEN, 'should acquire GCP auth token'); + assert.isNotNull(GCP, 'should fetch GCP API metadata from process.env'); + }); + + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[GOOGLE_SD_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Google_Cloud_Monitoring', + privateKey: { + cipherText: GCP.privateKey + }, + projectId: GCP.projectID, + serviceEmail: GCP.serviceEmail, + privateKeyId: GCP.privateKeyID + }; }); + + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should check Google Cloud Monitoring for system poller data - ${bigip.name}`, + () => { + let timeStart = new Date(); + let timeEnd = new Date(); + timeStart.setMinutes(timeEnd.getMinutes() - 5); + timeStart = timeStart.toJSON(); + timeEnd = timeEnd.toJSON(); + const queryString = [ + `interval.startTime=${timeStart}`, + `interval.endTime=${timeEnd}`, + 
`filter=metric.type="custom.googleapis.com/system/tmmCpu" AND resource.labels.namespace="${bigip.hostname}"` + ].join('&'); + + return gcpUtil.queryCloudMonitoring(ACCESS_TOKEN, GCP.projectID, queryString) + .then((timeSeries) => { + assert.isDefined(timeSeries.timeSeries[0].points[0]); + assert.deepStrictEqual(bigip.hostname, timeSeries.timeSeries[0].resource.labels.namespace); + }) + .catch((err) => { + bigip.logger.error('No system poller data found. Going to wait another 20sec', err); + return promiseUtils.sleepAndReject(20000, err); + }); + } + )); }); }); } -function teardown() {} - module.exports = { setup, - test, - teardown + test }; diff --git a/test/functional/consumersTests/kafkaTests.js b/test/functional/consumersTests/kafkaTests.js index b834e84d..fdeedce2 100644 --- a/test/functional/consumersTests/kafkaTests.js +++ b/test/functional/consumersTests/kafkaTests.js @@ -6,125 +6,390 @@ * the software product on devcentral.f5.com. */ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); const kafka = require('kafka-node'); -const util = require('../shared/util'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('kafkaTests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/kafka + */ // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = util.getHosts('BIGIP'); -const CONSUMER_HOST = util.getHosts('CONSUMER')[0]; // only expect one -const KAFKA_IMAGE_NAME = 
`${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}bitnami/kafka:latest`; -const ZOOKEEPER_IMAGE_NAME = `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}bitnami/zookeeper:latest`; -const ZOOKEEPER_NAME = 'zookeeper-server'; -const KAFKA_NAME = 'kafka-server'; -const KAFKA_HOST = CONSUMER_HOST.ip; -const KAFKA_PROTOCOL = 'binaryTcp'; -const KAFKA_PORT = '9092'; -const KAFKA_TOPIC = 'f5-telemetry'; const KAFKA_AUTH_PROTOCOL = 'None'; const KAFKA_CONSUMER_NAME = 'Consumer_Kafka'; +const KAFKA_PORT = 9092; +const KAFKA_PROTOCOL = 'binaryTcp'; +const KAFKA_TOPIC = 'f5-telemetry'; +const KAFKA_TIMEOUT = 2000; +const ZOOKEEPER_CLIENT_PORT = 2181; +const DOCKER_CONTAINERS = { + Kafka: { + detach: true, + env: { + ALLOW_PLAINTEXT_LISTENER: 'yes', + KAFKA_ADVERTISED_LISTENERS: null, + KAFKA_ZOOKEEPER_CONNECT: null + }, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}bitnami/kafka:latest`, + name: 'kafka-server', + publish: { + [KAFKA_PORT]: KAFKA_PORT + }, + restart: 'always' + }, + Zookeeper: { + detach: true, + env: { + ALLOW_ANONYMOUS_LOGIN: 'yes', + ZOOKEEPER_CLIENT_PORT + }, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}bitnami/zookeeper:latest`, + name: 'zookeeper-server', + publish: { + [ZOOKEEPER_CLIENT_PORT]: ZOOKEEPER_CLIENT_PORT + }, + restart: 'always' + } +}; // read in example config -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; -function runRemoteCmd(cmd) { - return util.performRemoteCmd(CONSUMER_HOST.ip, CONSUMER_HOST.username, cmd, { password: CONSUMER_HOST.password }); -} +/** + * Kafka Client for tests + */ +class KafkaClient { + /** + * Constructor + * + * @param {string} host - Kafka host + * @param {integer} port - port + * @param {string} topic - topic to listen for + */ + constructor(host, port, topic) { + this.client = new kafka.KafkaClient({ + autoConnect: false, + connectTimeout: KAFKA_TIMEOUT, 
+ kafkaHost: `${host}:${port}`, + requestTimeout: KAFKA_TIMEOUT + }); + this.logger = logger.getChild('kafkaClient'); + this.messageMap = new Map(); + this.partition = 0; + this.topic = topic; -function setup() { - describe('Consumer Setup: Kafka', () => { - [KAFKA_IMAGE_NAME, ZOOKEEPER_IMAGE_NAME].forEach( - (imageName) => it(`should pull ${imageName} docker image`, () => runRemoteCmd(`docker pull ${imageName}`)) - ); - - it('should start Zookeeper and Kafka docker containers', () => { - const zookeeperParams = '-e ALLOW_ANONYMOUS_LOGIN=yes -e ZOOKEEPER_CLIENT_PORT:2181 -p 2181:2181'; - const cmdZookeeper = `docker run -d ${zookeeperParams} --name ${ZOOKEEPER_NAME} ${ZOOKEEPER_IMAGE_NAME}`; - const kafkaParams = `-e ALLOW_PLAINTEXT_LISTENER=yes -e KAFKA_ZOOKEEPER_CONNECT=${KAFKA_HOST}:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://${KAFKA_HOST}:${KAFKA_PORT} -p ${KAFKA_PORT}:${KAFKA_PORT}`; - const cmdKafka = `docker run -d ${kafkaParams} --name ${KAFKA_NAME} ${KAFKA_IMAGE_NAME}`; - - // simple check to see if zookeeper already exists - return runRemoteCmd(`docker ps | grep ${ZOOKEEPER_NAME}`) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue + this.client.on('error', (clientErr) => { + this.logger.error('Kafka-Client error caught', clientErr); + }); + } + + /** + * Close connection to Kafka + * + * @returns {Promise} resolved once closed + */ + close() { + return new Promise((resolve) => { + this.logger.info('Closing Kafka-Client...'); + this.client.close(() => { + this.logger.info('Kafka-Client closed!'); + resolve(); + }); + }); + } + + /** + * Connect to Kafka + * + * @returns {Promise} resolved once connected + */ + connect() { + return new Promise((resolve, reject) => { + const onError = (clientErr) => { + // eslint-disable-next-line no-use-before-define + this.client.removeListener('ready', onReady); + reject(clientErr); + }; + const onReady = () => { + this.logger.info('Kafka-Client is ready!'); + 
this.client.removeListener('error', onError); + resolve(); + }; + this.client.once('error', onError); + this.client.once('ready', onReady); + + this.logger.info('Connection to Kafka server'); + this.client.connect(); + }) + .then(() => new Promise((resolve, reject) => { + this.logger.info('Receiving latest offset for topic', { topic: this.topic }); + const offset = new kafka.Offset(this.client); + // eslint-disable-next-line consistent-return + offset.fetchLatestOffsets([this.topic], (offsetErr, offsets) => { + if (offsetErr) { + reject(offsetErr); + } else { + this.latestOffset = (offsets[this.topic] && offsets[this.topic][this.partition]) || 0; + this.logger.info('Kafka-Offset returned latest offset', { latestOffset: this.latestOffset }); + resolve(); } - return runRemoteCmd(cmdZookeeper); - }) - .then(() => runRemoteCmd(`docker ps | grep ${KAFKA_NAME}`)) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue + }); + })) + .then(() => new Promise((resolve, reject) => { + this.logger.info('Creating topic', { topic: this.topic }); + this.client.createTopics([this.topic], (topicErr) => { + if (topicErr) { + reject(topicErr); + } else { + resolve(); + } + }); + })) + .then(() => new Promise((resolve, reject) => { + this.logger.info('Creating new consumer...'); + this.consumer = new kafka.Consumer( + this.client, + [{ + partition: this.partition, + topic: this.topic + }], + { + autoCommit: false, + fetchMaxBytes: 1024 * 1024 * 1024 // should be enough to fetch all messages } - return runRemoteCmd(cmdKafka); + ); + this.consumer.on('error', (consumerErr) => { + this.logger.error('Kafka-Consumer caught error', consumerErr); }); + const onError = (consumerErr) => { + reject(consumerErr); + }; + this.consumer.once('error', onError); + this.consumer.addTopics([this.topic], (topicErr) => { + this.consumer.removeListener('error', onError); + if (topicErr) { + onError(topicErr); + } else { + this.logger.info('New consumer created!'); + 
this.consumer.on('message', (message) => { + if (message.offset >= this.latestOffset && !this.messageMap.has(message.offset)) { + this.logger.info('Kafka message received:', message); + try { + this.messageMap.set(message.offset, JSON.parse(message.value)); + } catch (_) { + // do nothing + } + } + }); + resolve(); + } + }); + })); + } + + /** + * Received messages + * + * @param {function} [filter] - filter + * + * @returns {Promise>} resolved with messages + */ + getMessages(filter) { + return new Promise((resolve) => { + const it = this.messageMap.values(); + const msgs = []; + + let result = it.next(); + while (!result.done) { + if (!filter || filter(result.value)) { + msgs.push(result.value); + } + result = it.next(); + } + resolve(msgs); + }); + } +} + +let CONTAINERS_STARTED; + +/** + * Setup CS and DUTs + */ +function setup() { + describe('Consumer Setup: Kafka', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + + DOCKER_CONTAINERS.Kafka.env.KAFKA_ADVERTISED_LISTENERS = `PLAINTEXT://${cs.host.host}:${KAFKA_PORT}`; + DOCKER_CONTAINERS.Kafka.env.KAFKA_ZOOKEEPER_CONNECT = `${cs.host.host}:${ZOOKEEPER_CLIENT_PORT}`; + + describe('Docker container setup', () => { + before(() => { + CONTAINERS_STARTED = []; + }); + + after(() => { + CONTAINERS_STARTED = CONTAINERS_STARTED.every((v) => v); + }); + + Object.keys(DOCKER_CONTAINERS).forEach((serviceName) => it( + `should pull ${serviceName} docker image`, + () => cs.docker.pull(DOCKER_CONTAINERS[serviceName].image) + )); + Object.keys(DOCKER_CONTAINERS).forEach((serviceName) => it( + `should remove pre-existing ${serviceName} docker container`, + () => harnessUtils.docker.stopAndRemoveContainer(cs.docker, DOCKER_CONTAINERS[serviceName].name) + )); + + // order matters + ['Zookeeper', 'Kafka'].forEach((serviceName) => it( + `should start new ${serviceName} docker container`, + () => harnessUtils.docker.startNewContainer(cs.docker, DOCKER_CONTAINERS[serviceName]) + .then(() => 
CONTAINERS_STARTED.push(true)) + )); }); }); } +/** + * Tests for DUTs + */ function test() { - // const testType = 'Kafka_Consumer_Test'; - // const dataTimestamp = (new Date()).getTime(); - describe('Consumer Test: Kafka', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - consumerDeclaration[KAFKA_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Kafka', - host: KAFKA_HOST, - protocol: KAFKA_PROTOCOL, - port: KAFKA_PORT, - topic: KAFKA_TOPIC, - authenticationProtocol: KAFKA_AUTH_PROTOCOL - }; - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(consumerDeclaration)) - )); - - it('should receive message on Kafka consumer', function (done) { - this.retries(0); - this.timeout('120s'); - const client = new kafka.KafkaClient( - { kafkaHost: `${KAFKA_HOST}:${KAFKA_PORT}` } - ); - client.createTopics([KAFKA_TOPIC], () => { - const Consumer = kafka.Consumer; - const consumer = new Consumer( - client, - [ - { topic: KAFKA_TOPIC, partition: 0 } - ], - { autoCommit: false } - ); - consumer.addTopics([KAFKA_TOPIC], () => { - consumer.on('message', (message) => { - util.logger.info('Kafka message received:', message); - consumer.removeAllListeners(); - consumer.close(); - const parsedMessage = JSON.parse(message.value); - assert.notStrictEqual(parsedMessage.system, undefined, `Did not receive expected TS message. 
Instead received: ${JSON.stringify(message)}`); - done(); - }); - }); + const harness = harnessUtils.getDefaultHarness(); + const cs = harness.other[0]; + const testDataTimestamp = Date.now(); + let kafkaClient = null; + + before(() => { + assert.isOk(CONTAINERS_STARTED, 'should start Kafka and Zookeeper containers!'); + }); + + describe('Connect to Kafka server', () => { + it('Connect to Kafka server', () => { + const client = new KafkaClient(cs.host.host, KAFKA_PORT, KAFKA_TOPIC); + return client.connect() + .then( + () => { + kafkaClient = client; + }, + (err) => client.close() + .then(() => { + logger.error('Unable to connect to Kafka and Zookeeper. Going to sleep for 2sec and re-try:', err); + return promiseUtils.sleepAndReject(2000, err); + }) + ); }); }); + + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + assert.isNotNull(kafkaClient, 'should have Kafka-Client be ready for tests!'); + + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[KAFKA_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Kafka', + host: cs.host.host, + protocol: KAFKA_PROTOCOL, + port: KAFKA_PORT, + topic: KAFKA_TOPIC, + authenticationProtocol: KAFKA_AUTH_PROTOCOL + }; + }); + + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => `hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${KAFKA_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"`); + }); + + describe('Event Listener data', () => { + const timestampStr = testDataTimestamp.toString(); + + before(() => { + assert.isNotNull(kafkaClient, 'should have Kafka-Client be ready for tests!'); + }); + + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check Kafka for event listener data (over ${proto}) for - ${bigip.name}`, + () => 
kafkaClient.getMessages((log) => log.test === 'true' + && log.hostname === bigip.hostname + && log.protocol === proto + && log.testDataTimestamp === timestampStr + && log.testType === KAFKA_CONSUMER_NAME) + .then((logs) => { + if (logs.length === 0) { + bigip.logger.info('No event listener data found. Going to wait another 10sec'); + return promiseUtils.sleepAndReject(10000, `should have event(s) for a data from event listener (over ${proto})`); + } + return Promise.resolve(); + }) + ))); + }); + + describe('System Poller data', () => { + before(() => { + assert.isNotNull(kafkaClient, 'should have Kafka-Client be ready for tests!'); + }); + + harness.bigip.forEach((bigip) => it( + `should check Kafka for system poller data - ${bigip.name}`, + () => kafkaClient.getMessages((log) => { + if (typeof log.system === 'object' && log.system.hostname === bigip.hostname) { + const schema = miscUtils.readJsonFile(constants.DECL.SYSTEM_POLLER_SCHEMA); + return miscUtils.validateAgainstSchema(log, schema); + } + return false; + }) + .then((logs) => { + if (logs.length === 0) { + bigip.logger.info('No system poller data found. 
Going to wait another 10sec'); + // more sleep time for system poller + return promiseUtils.sleepAndReject(20000, 'should have event(s) for a data from system poller'); + } + return Promise.resolve(); + }) + )); + }); + + describe('Disconnect from Kafka server', () => { + before(() => { + assert.isNotNull(kafkaClient, 'should have Kafka-Client be ready for tests!'); + }); + + it('Disconnect from Kafka server', () => kafkaClient.close()); + }); }); } +/** + * Teardown CS + */ function teardown() { describe('Consumer Teardown: Kafka', () => { - it(`should remove ${ZOOKEEPER_NAME} container`, () => runRemoteCmd(`docker container rm -f ${ZOOKEEPER_NAME}`)); - it(`should remove ${KAFKA_NAME} container`, () => runRemoteCmd(`docker container rm -f ${KAFKA_NAME}`)); + const cs = harnessUtils.getDefaultHarness().other[0]; + + const serviceNames = Object.keys(DOCKER_CONTAINERS).join(', '); + const containerNames = Object.keys(DOCKER_CONTAINERS).map((serviceName) => DOCKER_CONTAINERS[serviceName].name); + + it(`should stop and remove ${serviceNames} docker containers`, () => harnessUtils.docker.stopAndRemoveContainer(cs.docker, containerNames)); }); } diff --git a/test/functional/consumersTests/openTelemetryExporterTests.js b/test/functional/consumersTests/openTelemetryExporterTests.js index 1b2ef5cf..c2dc3fb4 100644 --- a/test/functional/consumersTests/openTelemetryExporterTests.js +++ b/test/functional/consumersTests/openTelemetryExporterTests.js @@ -6,28 +6,31 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); -const testUtil = require('../shared/util'); -const utils = require('../../../src/lib/utils/misc'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); +const pathUtil = require('path'); + const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const logger = require('../shared/utils/logger').getChild('otelTests'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const srcMiscUtils = require('../../../src/lib/utils/misc'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/openTelemetryExporter + */ // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = testUtil.getHosts('BIGIP'); -const SHOULD_SKIP_TESTS = {}; - -const CONSUMER_HOST = testUtil.getHosts('CONSUMER')[0]; // only expect one const OTEL_METRICS_PATH = '/v1/metrics'; -const OTEL_COLLECTOR_IMAGE_NAME = `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}otel/opentelemetry-collector-contrib`; -const OTEL_COLLECTOR_NAME = 'otel-collector'; -const OTEL_COLLECTOR_HOST = CONSUMER_HOST.ip; const OTEL_COLLECTOR_FOLDER = 'otel'; const OTEL_COLLECTOR_CONF_FILE = 'config.yaml'; const OTEL_COLLECTOR_RECEIVER_PORT = 55681; @@ -52,130 +55,203 @@ service: processors: [batch] exporters: [prometheus]`; +const DOCKER_CONTAINERS = { + OTELCollector: { + detach: true, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}otel/opentelemetry-collector-contrib`, + name: 'otel-collector', + publish: { + [OTEL_COLLECTOR_PROMETHEUS_PORT]: OTEL_COLLECTOR_PROMETHEUS_PORT, + [OTEL_COLLECTOR_RECEIVER_PORT]: OTEL_COLLECTOR_RECEIVER_PORT + }, + restart: 'always', + volume: { + 
[`$(pwd)/${OTEL_COLLECTOR_FOLDER}/${OTEL_COLLECTOR_CONF_FILE}`]: '/etc/otel/config.yaml' + } + } +}; + // read in example config -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); - -function runRemoteCmd(cmd) { - return testUtil.performRemoteCmd( - CONSUMER_HOST.ip, - CONSUMER_HOST.username, - cmd, - { password: CONSUMER_HOST.password } - ); -} +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; +let CONTAINER_STARTED; +let SHOULD_SKIP_DUE_VERSION; + +/** + * Setup CS and DUTs + */ function setup() { describe('Consumer Setup: OpenTelemetry Exporter', () => { - it('should pull OpenTelemetry Collector docker image', () => runRemoteCmd(`docker pull ${OTEL_COLLECTOR_IMAGE_NAME}`)); + const harness = harnessUtils.getDefaultHarness(); + const cs = harnessUtils.getDefaultHarness().other[0]; + cs.http.createAndSave('otel', { + port: OTEL_COLLECTOR_PROMETHEUS_PORT, + protocol: 'http', + retry: { + maxTries: 10, + delay: 1000 + } + }); - it('should write OpenTelemetry Collector configuration', () => runRemoteCmd(`mkdir -p ${OTEL_COLLECTOR_FOLDER} && echo "${OTEL_COLLECTOR_CONF}" > ${OTEL_COLLECTOR_FOLDER}/${OTEL_COLLECTOR_CONF_FILE}`)); + describe('Docker container setup', () => { + before(() => { + CONTAINER_STARTED = false; + SHOULD_SKIP_DUE_VERSION = {}; + }); - it('should start OpenTelemetry Collector docker container', () => { - const otelCollectorParams = `-p ${OTEL_COLLECTOR_RECEIVER_PORT}:${OTEL_COLLECTOR_RECEIVER_PORT} -p ${OTEL_COLLECTOR_PROMETHEUS_PORT}:${OTEL_COLLECTOR_PROMETHEUS_PORT} -v $(pwd)/${OTEL_COLLECTOR_FOLDER}/${OTEL_COLLECTOR_CONF_FILE}:/etc/otel/config.yaml`; - const cmdOtelCollector = `docker run -d ${otelCollectorParams} --name ${OTEL_COLLECTOR_NAME} ${OTEL_COLLECTOR_IMAGE_NAME}`; + it('should pull OTEL docker image', () => cs.docker.pull(DOCKER_CONTAINERS.OTELCollector.image)); - // simple check to see if OpenTelemetry Collector container 
already exists - return runRemoteCmd(`docker ps | grep ${OTEL_COLLECTOR_NAME}`) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue - } - return runRemoteCmd(cmdOtelCollector); - }); + it('should remove pre-existing OTEL docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.OTELCollector.name + )); + + it('should write OTEL configuration', () => cs.ssh.default.mkdirIfNotExists(OTEL_COLLECTOR_FOLDER) + .then(() => cs.ssh.default.writeToFile( + pathUtil.join(OTEL_COLLECTOR_FOLDER, OTEL_COLLECTOR_CONF_FILE), + OTEL_COLLECTOR_CONF + ))); + + it('should start new OTEL docker container', () => harnessUtils.docker.startNewContainer( + cs.docker, + DOCKER_CONTAINERS.OTELCollector + ) + .then(() => { + CONTAINER_STARTED = true; + })); }); - DUTS.forEach((dut) => it( - `get bigip version and check if version is high enough for OpenTelemetry Exporter - ${dut.hostalias}`, - () => testUtil.getBigipVersion(dut) - .then((response) => { - // OpenTelemetry Exporter consumer is supported on bigip 14.1 and above - SHOULD_SKIP_TESTS[dut.hostalias] = utils.compareVersionStrings(response, '<', '14.1'); - }) - )); + describe('Gather information about DUTs version', () => { + harness.bigip.forEach((bigip) => it( + `should get bigip version and check if version is high enough for OpenTelemetry Exporter - ${bigip.name}`, + () => bigip.icAPI.default.getSoftwareVersion() + .then((version) => { + // OpenTelemetry Exporter consumer is supported on bigip 14.1 and above + SHOULD_SKIP_DUE_VERSION[bigip.hostname] = srcMiscUtils.compareVersionStrings(version, '<', '14.1'); + + logger.info('DUT\' version', { + hostname: bigip.hostname, + shouldSkipTests: SHOULD_SKIP_DUE_VERSION[bigip.hostname], + version + }); + }) + )); + }); }); } +/** + * Tests for DUTs + */ function test() { - const verifyResponse = (response, dutHostname) => { - const dutSystemMemoryRegex = new RegExp(`\\nsystem_memory{.*hostname="${dutHostname}".*} 
\\d{1,2}\\n`); - const mockAVRMetricRegex = new RegExp(`\\nfunctionalTestMetric{.*hostname="${dutHostname}".*} 147\\n`); - - assert.notStrictEqual( - response.indexOf('# HELP system_tmmCpu system.tmmCpu'), - -1, - 'help text should exist, and contain original metric name' - ); - - assert.notStrictEqual( - response.indexOf('# TYPE system_tmmCpu gauge'), - -1, - 'metric type should be of type \'gauge\'' - ); - - assert.ok( - dutSystemMemoryRegex.test(response), - 'response should include \'system_memory\' metric with appropriate label, and a value' - ); - - assert.ok( - mockAVRMetricRegex.test(response), - 'response should include \'functionalTestMetric\' metric with appropriate label, and a value' - ); - }; - - describe('Consumer Test: OpenTelemetry Exporter - Configure TS and generate data', () => { - const consumerDeclaration = testUtil.deepCopy(DECLARATION); - delete consumerDeclaration.My_Consumer; - consumerDeclaration[OTEL_COLLECTOR_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'OpenTelemetry_Exporter', - host: OTEL_COLLECTOR_HOST, - port: OTEL_COLLECTOR_RECEIVER_PORT, - metricsPath: `${OTEL_METRICS_PATH}` - }; - DUTS.forEach((dut) => { - it(`should configure TS - ${dut.hostalias}`, function () { - if (SHOULD_SKIP_TESTS[dut.hostalias]) { - this.skip(); - } - return dutUtils.postDeclarationToDUT(dut, testUtil.deepCopy(consumerDeclaration)); - }); + describe('Consumer Test: OpenTelemetry Exporter', () => { + const harness = harnessUtils.getDefaultHarness(); + const cs = harness.other[0]; + const testDataTimestamp = Date.now(); - it(`should send known event to TS Event Listener - ${dut.hostalias}`, function () { - if (SHOULD_SKIP_TESTS[dut.hostalias]) { - this.skip(); - } - const mockMsgAVR = `EOCTimestamp="1231232",hostname="${dut.hostname}",functionalTestMetric="147"`; - return dutUtils.sendDataToEventListener(dut, mockMsgAVR, { numOfMsg: 4 }); - }); + /** + * @returns {boolean} true if DUt satisfies version restriction + */ + const isValidDut = 
(dut) => !SHOULD_SKIP_DUE_VERSION[dut.hostname]; + + before(() => { + assert.isOk(CONTAINER_STARTED, 'should start OTEL container!'); }); - }); - describe('Consumer Test: OpenTelemetry Exporter - Tests', () => { - DUTS.forEach((dut) => { - it(`should check the OpenTelemetry Collector for published data data for - ${dut.hostalias}`, function () { - if (SHOULD_SKIP_TESTS[dut.hostalias]) { - this.skip(); - } - const httpOptions = { - method: 'GET', - port: OTEL_COLLECTOR_PROMETHEUS_PORT, - protocol: 'http' - }; + describe('Configure TS and generate data', () => { + let consumerDeclaration; - testUtil.logger.info('Delay 15000ms to ensure data is sent to OpenTelemetry Collector'); - return testUtil.sleep(15 * 1000) - .then(() => testUtil.makeRequest(OTEL_COLLECTOR_HOST, '/metrics', httpOptions)) - .then((response) => verifyResponse(response, dut.hostname)); + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[OTEL_COLLECTOR_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'OpenTelemetry_Exporter', + host: cs.host.host, + port: OTEL_COLLECTOR_RECEIVER_PORT, + metricsPath: `${OTEL_METRICS_PATH}` + }; }); + + testUtils.shouldConfigureTS(harness.bigip, (bigip) => (isValidDut(bigip) + ? miscUtils.deepCopy(consumerDeclaration) + : null)); + + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => (isValidDut(bigip) + ? 
`functionalTestMetric="147",EOCTimestamp="1231232",hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${OTEL_COLLECTOR_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"` + : null)); + }); + + describe('Event Listener data', () => { + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check OTEL for event listener data (over ${proto}) for - ${bigip.name}`, + function () { + if (!isValidDut(bigip)) { + return this.skip(); + } + return cs.http.otel.makeRequest({ + uri: '/metrics' + }) + .then((data) => { + const mockAVRMetricRegex = new RegExp(`functionalTestMetric{.*hostname="${bigip.hostname}".*} 147`); + assert.isOk( + data.split('\n') + .some((line) => mockAVRMetricRegex.test(line) && line.indexOf(`protocol="${proto}"`) !== -1), + `should have metrics(s) for a data from event listener (over ${proto})` + ); + }) + .catch((err) => { + bigip.logger.info('No event listener data found. Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }); + } + ))); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should check OTEL for system poller data - ${bigip.name}`, + function () { + if (!isValidDut(bigip)) { + return this.skip(); + } + return cs.http.otel.makeRequest({ + uri: '/metrics' + }) + .then((data) => { + const dutSystemMemoryRegex = new RegExp(`system_memory{.*hostname="${bigip.hostname}".*} \\d{1,2}`); + assert.isOk( + data.split('\n') + .some((line) => dutSystemMemoryRegex.test(line)), + 'should have metric(s) for a data from system poller' + ); + }) + .catch((err) => { + bigip.logger.info('No system poller data found. 
Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }); + } + )); }); }); } +/** + * Teardown CS + */ function teardown() { describe('Consumer Teardown: OpenTelemetry Exporter', () => { - it('should remove OpenTelemetry Collector container', () => runRemoteCmd(`docker container rm -f ${OTEL_COLLECTOR_NAME}`)); - it('should remove OpenTelemetry Collector configuration file', () => runRemoteCmd(`rm -rf ${OTEL_COLLECTOR_FOLDER}`)); + const cs = harnessUtils.getDefaultHarness().other[0]; + + it('should stop and remove OTEL docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.OTELCollector.name + )); + + it('should remove OTEL configuration file', () => cs.ssh.default.unlinkIfExists(pathUtil.join(OTEL_COLLECTOR_FOLDER, OTEL_COLLECTOR_CONF_FILE))); + + it('should remove OTEL directory', () => cs.ssh.default.rmdirIfExists(OTEL_COLLECTOR_FOLDER)); }); } diff --git a/test/functional/consumersTests/prometheusTests.js b/test/functional/consumersTests/prometheusTests.js new file mode 100644 index 00000000..a8c1d045 --- /dev/null +++ b/test/functional/consumersTests/prometheusTests.js @@ -0,0 +1,126 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +const constants = require('../shared/constants'); +const DEFAULT_UNNAMED_NAMESPACE = require('../../../src/lib/constants').DEFAULT_UNNAMED_NAMESPACE; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/prometheus + */ + +// read in example config +const BASIC_DECL = miscUtils.readJsonFile(constants.DECL.PULL_CONSUMER_BASIC); +const NAMESPACE_DECL = miscUtils.readJsonFile(constants.DECL.PULL_CONSUMER_WITH_NAMESPACE); + +const PROMETHEUS_CONTENT_TYPE = 'text/plain; version=0.0.4; charset=utf-8'; + +/** + * Tests for DUTs + */ +function test() { + describe('Consumer Test: Prometheus', () => { + const harness = harnessUtils.getDefaultHarness(); + + const verifyResponseData = (response) => { + const body = response[0]; + const headers = response[1].headers; + + assert.notStrictEqual( + body.indexOf('# HELP f5_counters_bitsIn counters.bitsIn'), + -1, + 'help text should exist, and contain original metric name' + ); + assert.notStrictEqual( + body.indexOf('f5_counters_bitsIn{networkInterfaces="mgmt"}'), + -1, + 'metric should include label with label value' + ); + assert.notStrictEqual( + body.indexOf('f5_system_tmmTraffic_serverSideTraffic_bitsIn'), + -1, + 'metrics without labels should store path in metric name' + ); + assert.notStrictEqual( + body.match(/(f5_system_memory )[0-9]{1,2}\n/), + null, + 'metric\'s value should only be a numeric, followed by a newline' + ); + assert.deepStrictEqual(headers['content-type'], PROMETHEUS_CONTENT_TYPE, 'content-type should be of type text/plain'); + }; + + describe('Without namespace', () => { + const pullConsumerName = 'My_Pull_Consumer'; + let consumerDeclaration; + + before(() => { + consumerDeclaration = 
miscUtils.deepCopy(BASIC_DECL); + consumerDeclaration[pullConsumerName].type = 'Prometheus'; + }); + + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data - ${bigip.name}`, + () => bigip.telemetry.getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response)) + )); + + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data using namespace endpoint - ${bigip.name}`, + () => bigip.telemetry + .toNamespace(DEFAULT_UNNAMED_NAMESPACE, true) + .getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response)) + )); + }); + }); + + describe('With namespace', () => { + const namespace = 'Second_Namespace'; + const pullConsumerName = 'Pull_Consumer'; + let consumerDeclaration; + + before(() => { + consumerDeclaration = miscUtils.deepCopy(NAMESPACE_DECL); + consumerDeclaration[namespace][pullConsumerName].type = 'Prometheus'; + }); + + describe('Configure TS and generate data', () => { + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + }); + + describe('System Poller data', () => { + harness.bigip.forEach((bigip) => it( + `should get the Pull Consumer's formatted data using namespace endpoint - ${bigip.name}`, + () => bigip.telemetry + .toNamespace(namespace) + .getPullConsumerData(pullConsumerName) + .then((response) => verifyResponseData(response)) + )); + }); + }); + }); +} + +module.exports = { + test +}; diff --git a/test/functional/consumersTests/splunkTests.js b/test/functional/consumersTests/splunkTests.js index 3ee8b325..1d83fbfa 100644 --- a/test/functional/consumersTests/splunkTests.js +++ b/test/functional/consumersTests/splunkTests.js @@ -6,189 +6,227 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; const chai = require('chai'); const chaiAsPromised = require('chai-as-promised'); -const fs = require('fs'); const querystring = require('querystring'); -const util = require('../shared/util'); const constants = require('../shared/constants'); -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); chai.use(chaiAsPromised); const assert = chai.assert; +/** + * @module test/functional/consumersTests/splunk + */ + // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = util.getHosts('BIGIP'); -const CONSUMER_HOST = util.getHosts('CONSUMER')[0]; // only expect one -const SPLUNK_IMAGE_NAME = `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}splunk/splunk:latest`; -const SPLUNK_CONTAINER_NAME = 'ts_splunk_consumer'; const SPLUNK_USERNAME = 'admin'; -const SPLUNK_PASSWORD = `${CONSUMER_HOST.password}splunk!`; // might want to generate one instead +const SPLUNK_PASSWORD = `${miscUtils.randomString()}splunk!`; // might want to generate one instead const SPLUNK_AUTH_HEADER = `Basic ${Buffer.from(`${SPLUNK_USERNAME}:${SPLUNK_PASSWORD}`).toString('base64')}`; const SPLUNK_HTTP_PORT = 8000; +const SPLUNK_HTTP_PROTOCOL = 'https'; const SPLUNK_HEC_PORT = 8088; const SPLUNK_SVC_PORT = 8089; const SPLUNK_CONSUMER_NAME = 'Splunk_Consumer'; +const DOCKER_CONTAINERS = { + Splunk: { + detach: true, + env: { + SPLUNK_START_ARGS: '--accept-license', + SPLUNK_PASSWORD + }, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}splunk/splunk:latest`, + name: 'ts_splunk_consumer', + publish: { + [SPLUNK_HEC_PORT]: SPLUNK_HEC_PORT, + [SPLUNK_HTTP_PORT]: SPLUNK_HTTP_PORT, + [SPLUNK_SVC_PORT]: SPLUNK_SVC_PORT + }, + restart: 'always' + } +}; + // read in example config -const DECLARATION = 
JSON.parse(fs.readFileSync(constants.DECL.BASIC)); +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; -function runRemoteCmd(cmd) { - return util.performRemoteCmd(CONSUMER_HOST.ip, CONSUMER_HOST.username, cmd, { password: CONSUMER_HOST.password }); -} +let SERVICE_IS_READY; +let SPLUNK_TOKENS; +/** + * Setup CS and DUTs + */ function setup() { - describe('Consumer Setup: Splunk - pull docker image', () => { - it('should pull container image', () => runRemoteCmd(`docker pull ${SPLUNK_IMAGE_NAME}`)); - }); -} - -function test() { - let testDataTimestamp; - const splunkHecTokens = { - events: null, - metrics: null - }; + describe('Consumer Setup: Splunk', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + let CONTAINER_STARTED; + + cs.http.createAndSave('splunk', { + allowSelfSignedCert: true, + headers: { + Authorization: SPLUNK_AUTH_HEADER + }, + json: false, + port: SPLUNK_SVC_PORT, + protocol: SPLUNK_HTTP_PROTOCOL + }); - describe('Consumer Test: Splunk - Configure Service', () => { - it('should start container', () => { - const portArgs = `-p ${SPLUNK_HTTP_PORT}:${SPLUNK_HTTP_PORT} -p ${SPLUNK_SVC_PORT}:${SPLUNK_SVC_PORT} -p ${SPLUNK_HEC_PORT}:${SPLUNK_HEC_PORT}`; - const eArgs = `-e 'SPLUNK_START_ARGS=--accept-license' -e 'SPLUNK_PASSWORD=${SPLUNK_PASSWORD}'`; - const cmd = `docker run -d --name ${SPLUNK_CONTAINER_NAME} ${portArgs} ${eArgs} ${SPLUNK_IMAGE_NAME}`; + describe('Docker container setup', () => { + before(() => { + CONTAINER_STARTED = false; + }); - // simple check to see if container already exists - return runRemoteCmd(`docker ps | grep ${SPLUNK_CONTAINER_NAME}`) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue - } - return runRemoteCmd(cmd); - }); - }); + it('should pull Splunk docker image', () => cs.docker.pull(DOCKER_CONTAINERS.Splunk.image)); - it('should check service is up', () => { - const uri = 
'/services/server/control?output_mode=json'; - const options = { - port: SPLUNK_SVC_PORT, - headers: { - Authorization: SPLUNK_AUTH_HEADER - } - }; + it('should remove pre-existing Splunk docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.Splunk.name + )); - // splunk container takes about 30 seconds to come up - return new Promise((resolve) => { setTimeout(resolve, 10000); }) - .then(() => util.makeRequest(CONSUMER_HOST.ip, uri, options)) - .then((data) => { - util.logger.info(`Splunk response ${uri}`, data); - assert.strictEqual(data.links.restart, '/services/server/control/restart'); - }); + it('should start new Splunk docker container', () => harnessUtils.docker.startNewContainer( + cs.docker, + DOCKER_CONTAINERS.Splunk + ) + .then(() => { + CONTAINER_STARTED = true; + })); }); - it('should configure HTTP data collector for events', () => { - const baseUri = '/services/data/inputs/http'; - const outputMode = 'output_mode=json'; - const tokenName = 'eventsToken'; - - let uri = `${baseUri}/http?${outputMode}&enableSSL=1&disabled=0`; - const options = { - method: 'POST', - port: SPLUNK_SVC_PORT, - headers: { - Authorization: SPLUNK_AUTH_HEADER - } - }; + describe('Configure service', () => { + before(() => { + SPLUNK_TOKENS = { + events: null, + metrics: null + }; + SERVICE_IS_READY = false; + assert.isOk(CONTAINER_STARTED, 'should start Splunk container!'); + }); - // configure global settings, create token - return util.makeRequest(CONSUMER_HOST.ip, uri, options) - .then(() => { - uri = `${baseUri}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { method: 'GET' })); - }) + it('should check service is up', () => cs.http.splunk.makeRequest({ + uri: '/services/server/control?output_mode=json' + }) + // splunk container takes about 30 seconds to come up .then((data) => { - data = data || {}; - // check for existence of the token first - if (data.entry && 
data.entry.length) { - const exists = data.entry.filter((item) => item.name.indexOf(tokenName) !== -1); - if (exists.length) { - return Promise.resolve({ entry: exists }); // exists, continue - } - } - uri = `${baseUri}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { body: `name=${tokenName}` })); + cs.logger.info('Splunk output:', { data }); + assert.deepStrictEqual(data.links.restart, '/services/server/control/restart', 'should return expected response'); }) - .then((data) => { - try { - splunkHecTokens.events = data.entry[0].content.token; - } catch (error) { - throw new Error('HTTP data collector api token could not be retrieved'); - } - assert.notStrictEqual(splunkHecTokens.events, undefined); - }); - }); + .catch((err) => { + cs.logger.error('Caught error on attempt to check service state. Re-trying in 10sec', err); + return promiseUtils.sleepAndReject(10000, err); + })); + + it('should configure HTTP data collector for events', () => { + const baseUri = '/services/data/inputs/http'; + const outputMode = 'output_mode=json'; + const tokenName = 'eventsToken'; + + return cs.http.splunk.makeRequest({ + method: 'POST', + uri: `${baseUri}/http?${outputMode}&enableSSL=1&disabled=0` + }) + .then(() => cs.http.splunk.makeRequest({ + method: 'GET', + uri: `${baseUri}?${outputMode}` + })) + .then((data) => { + data = data || {}; + // check for existence of the token first + if (data.entry && data.entry.length) { + const exists = data.entry.filter((item) => item.name.indexOf(tokenName) !== -1); + if (exists.length) { + return Promise.resolve({ entry: exists }); // exists, continue + } + } + return cs.http.splunk.makeRequest({ + body: `name=${tokenName}`, + method: 'POST', + uri: `${baseUri}?${outputMode}` + }); + }) + .then((data) => { + SPLUNK_TOKENS.events = data.entry[0].content.token; + assert.isNotEmpty(SPLUNK_TOKENS.events, 'should acquire token for events'); + }) + .catch((err) => { + 
cs.logger.error('Caught error on attempt to configured HTT data collector. Re-trying in 500ms', err); + return promiseUtils.sleepAndReject(500, err); + }); + }); - it('should configure HTTP data collector for metrics', () => { - const indexesUri = '/services/data/indexes'; - const tokensUri = '/services/data/inputs/http'; - const outputMode = 'output_mode=json'; - const indexName = 'metrics_index'; - const tokenName = 'metrics_token'; - const options = { - method: 'POST', - port: SPLUNK_SVC_PORT, - headers: { - Authorization: SPLUNK_AUTH_HEADER - } - }; + it('should configure HTTP data collector for metrics', () => { + const indexesUri = '/services/data/indexes'; + const tokensUri = '/services/data/inputs/http'; + const outputMode = 'output_mode=json'; + const indexName = 'metrics_index'; + const tokenName = 'metrics_token'; - // configure global settings, create index and token - let uri = `${tokensUri}/http?${outputMode}&enableSSL=1&disabled=0`; - return util.makeRequest(CONSUMER_HOST.ip, uri, options) - .then(() => { - uri = `${indexesUri}/${indexName}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { method: 'GET' })) - .catch(() => { - // index doesn't exist, let's create it - uri = `${indexesUri}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { body: `name=${indexName}&datatype=metric` })); - }); + return cs.http.splunk.makeRequest({ + method: 'POST', + uri: `${tokensUri}/http?${outputMode}&enableSSL=1&disabled=0` }) - .then(() => { + .then(() => cs.http.splunk.makeRequest({ + uri: `${indexesUri}/${indexName}?${outputMode}` + }) + .catch((err) => { + // index doesn't exist, let's create it + cs.logger.error(`Index "${indexName}" doesn't exit. 
Going to create new one...`, err); + return cs.http.splunk.makeRequest({ + body: `name=${indexName}&datatype=metric`, + method: 'POST', + uri: `${indexesUri}?${outputMode}` + }); + })) // create new token for metrics - uri = `${tokensUri}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { method: 'GET' })); - }) - .then((data) => { - data = data || {}; - // check for existence of the token first - if (data.entry && data.entry.length) { - const exists = data.entry.filter((item) => item.name.indexOf(tokenName) !== -1); - if (exists.length) { - return Promise.resolve({ entry: exists }); // exists, continue + .then(() => cs.http.splunk.makeRequest({ + uri: `${tokensUri}?${outputMode}` + })) + .then((data) => { + data = data || {}; + // check for existence of the token first + if (data.entry && data.entry.length) { + const exists = data.entry.filter((item) => item.name.indexOf(tokenName) !== -1); + if (exists.length) { + return Promise.resolve({ entry: exists }); // exists, continue + } } - } - uri = `${tokensUri}?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, Object.assign(util.deepCopy(options), { - body: `name=${tokenName}&index=${indexName}&source=metricsSourceType&sourcetype=Metrics` - })); - }) - .then((data) => { - try { - splunkHecTokens.metrics = data.entry[0].content.token; - } catch (error) { - throw new Error('HTTP data collector api token could not be retrieved'); - } - assert.notStrictEqual(splunkHecTokens.metrics, undefined); - }); + return cs.http.splunk.makeRequest({ + body: `name=${tokenName}&index=${indexName}&source=metricsSourceType&sourcetype=Metrics`, + method: 'POST', + uri: `${tokensUri}?${outputMode}` + }); + }) + .then((data) => { + SPLUNK_TOKENS.metrics = data.entry[0].content.token; + assert.isNotEmpty(SPLUNK_TOKENS.metrics, 'should acquire token for metrics'); + }) + .catch((err) => { + cs.logger.error('Caught error on attempt to configured HTT metrics collector. 
Re-trying in 500ms', err); + return promiseUtils.sleepAndReject(500, err); + }); + }); + + it('should acquire all tokens', () => { + assert.isNotNull(SPLUNK_TOKENS.events, 'should acquire token for events'); + assert.isNotNull(SPLUNK_TOKENS.metrics, 'should acquire token for metrics'); + SERVICE_IS_READY = true; + }); }); }); +} +/** + * Tests for DUTs + */ +function test() { const testSetupOptions = { compression: [ { @@ -224,194 +262,229 @@ function test() { }); testSetups.forEach((testSetup) => { - describe(`${testSetup.compression.name}, ${testSetup.format.name}`, () => { - describe('Consumer Test: Splunk - Configure TS and generate data', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - - // this need only to insert 'splunkHecEventsToken' - it('should compute declaration', () => { - consumerDeclaration[SPLUNK_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Splunk', - host: CONSUMER_HOST.ip, - protocol: 'https', - port: SPLUNK_HEC_PORT, - passphrase: { - cipherText: splunkHecTokens[testSetup.format.tokenName] - }, - format: testSetup.format.value, - allowSelfSignedCert: true, - compressionType: testSetup.compression.value - }; - }); - - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(consumerDeclaration)) - )); - - if (testSetup.format.eventListenerTests) { - it('set data timestamp', () => { - testDataTimestamp = (new Date()).getTime(); - }); - - it('should send event to TS Event Listener', () => { - const msg = `testDataTimestamp="${testDataTimestamp}",test="true",testType="${SPLUNK_CONSUMER_NAME}"`; - return dutUtils.sendDataToEventListeners((dut) => `hostname="${dut.hostname}",${msg}`); - }); - } + describe(`Consumer Test: Splunk - ${testSetup.compression.name}, ${testSetup.format.name}`, () => { + before(() => { + assert.isTrue(SERVICE_IS_READY, 'should start Splunk service'); }); + testsForSuite(testSetup); + }); + }); +} - describe('Consumer Test: 
Splunk - Test', () => { - const splunkSourceStr = 'f5.telemetry'; - const splunkSourceTypeStr = 'f5:telemetry:json'; - - // helper function to query splunk for data - const query = (searchString) => { - const baseUri = '/services/search/jobs'; - const outputMode = 'output_mode=json'; - const options = { - port: SPLUNK_SVC_PORT, - headers: { - Authorization: SPLUNK_AUTH_HEADER - } - }; +/** + * Generate tests using test config + * + * @param {Object} testSetup - test config + */ +function testsForSuite(testSetup) { + const harness = harnessUtils.getDefaultHarness(); + const cs = harnessUtils.getDefaultHarness().other[0]; + let testDataTimestamp; - let uri = `${baseUri}?${outputMode}`; - let sid; + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + testDataTimestamp = Date.now(); + + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + consumerDeclaration[SPLUNK_CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Splunk', + host: cs.host.host, + protocol: 'https', + port: SPLUNK_HEC_PORT, + passphrase: { + cipherText: SPLUNK_TOKENS[testSetup.format.tokenName] + }, + format: testSetup.format.value, + allowSelfSignedCert: true, + compressionType: testSetup.compression.value + }; + }); - return util.makeRequest( - CONSUMER_HOST.ip, - uri, - Object.assign(util.deepCopy(options), { - method: 'POST', - body: querystring.stringify({ - search: searchString - }) - }) - ) + testUtils.shouldConfigureTS(harness.bigip, () => miscUtils.deepCopy(consumerDeclaration)); + testUtils.shouldSendListenerEvents(harness.bigip, (bigip, proto, port, idx) => `hostname="${bigip.hostname}",testDataTimestamp="${testDataTimestamp}",test="true",testType="${SPLUNK_CONSUMER_NAME}",protocol="${proto}",msgID="${idx}"`); + }); + + /** + * Query to search metrics + * + * @param {harness.BigIp} bigip - BIG-IP + * + * @returns {string} query + */ + const searchMetrics = (bigip) => `| mcatalog values(metric_name) WHERE index=* AND 
host="${bigip.hostname}" AND earliest=-30s latest=now`; + + /** + * Query to search system poller data + * + * @param {harness.BigIp} bigip - BIG-IP + * + * @returns {string} query + */ + const searchQuerySP = (bigip) => `search source=f5.telemetry earliest=-30s latest=now | search "system.hostname"="${bigip.hostname}" | head 1`; + + /** + * Query to search event listener data + * + * @param {harness.BigIp} bigip - BIG-IP + * + * @returns {string} query + */ + const searchQueryEL = (bigip, proto) => `search source=f5.telemetry | spath testType | search testType="${SPLUNK_CONSUMER_NAME}" | search hostname="${bigip.hostname}" | search testDataTimestamp="${testDataTimestamp}" | search protocol="${proto}" | head 1`; + + const splunkSourceStr = 'f5.telemetry'; + const splunkSourceTypeStr = 'f5:telemetry:json'; + + /** + * Send query to Splunk + * + * @param {string} searchString - query string + * + * @returns {Promise} resolved once search request processed + */ + const query = (searchString) => { + const baseUri = '/services/search/jobs'; + const outputMode = 'output_mode=json'; + let sid; + + return cs.http.splunk.makeRequest({ + body: querystring.stringify({ + search: searchString + }), + expectedResponseCode: [200, 201], + method: 'POST', + uri: `${baseUri}?${outputMode}` + }) + .then((data) => { + sid = data.sid; + assert.isDefined(sid, 'should have sid'); + + return promiseUtils.loopUntil((breakCb) => cs.http.splunk.makeRequest({ + uri: `${baseUri}/${sid}?${outputMode}` + }) + .then((status) => { + if (status.entry[0].content.dispatchState === 'DONE') { + return breakCb(); + } + return promiseUtils.sleep(300); + })); + }) + .then(() => cs.http.splunk.makeRequest({ + uri: `${baseUri}/${sid}/results/?${outputMode}` + })); + }; + + if (testSetup.format.eventListenerTests) { + describe('Event Listener data', () => { + harness.bigip.forEach((bigip) => LISTENER_PROTOCOLS + .forEach((proto) => it( + `should check Splunk for event listener data (over ${proto}) for - 
${bigip.name}`, + () => query(searchQueryEL(bigip, proto)) .then((data) => { - sid = data.sid; - assert.notStrictEqual(sid, undefined); - - // wait until job search is complete using dispatchState:'DONE' - return new Promise((resolve, reject) => { - const waitUntilDone = () => { - uri = `${baseUri}/${sid}?${outputMode}`; - return new Promise((resolveTimer) => { setTimeout(resolveTimer, 100); }) - .then(() => util.makeRequest(CONSUMER_HOST.ip, uri, options)) - .then((status) => { - const dispatchState = status.entry[0].content.dispatchState; - if (dispatchState === 'DONE') { - resolve(status); - return Promise.resolve(status); - } - return waitUntilDone(); - }) - .catch(reject); - }; - waitUntilDone(); // start - }); + // check we have results + const results = data.results; + assert.isArray(results, 'should be array'); + assert.isNotEmpty(results, 'should return search results'); + + // check that the event is what we expect + const result = results[0]; + const rawData = JSON.parse(result._raw); + + assert.deepStrictEqual(rawData.testType, SPLUNK_CONSUMER_NAME); + assert.deepStrictEqual(rawData.hostname, bigip.hostname); + // validate data parsed by Splunk + assert.deepStrictEqual(result.host, bigip.hostname); + assert.deepStrictEqual(result.source, splunkSourceStr); + assert.deepStrictEqual(result.sourcetype, splunkSourceTypeStr); + assert.deepStrictEqual(result.hostname, bigip.hostname); + assert.deepStrictEqual(result.protocol, proto); + assert.deepStrictEqual(result.testType, SPLUNK_CONSUMER_NAME); + assert.deepStrictEqual(result.testDataTimestamp, `${testDataTimestamp}`); }) - .then(() => { - uri = `${baseUri}/${sid}/results/?${outputMode}`; - return util.makeRequest(CONSUMER_HOST.ip, uri, options); - }); - }; - // end helper function - - DUTS.forEach((dut) => { - // use earliest and latests query modifiers to filter results - const searchMetrics = () => `| mcatalog values(metric_name) WHERE index=* AND host="${dut.hostname}" AND earliest=-30s latest=now`; - 
const searchQuerySP = () => `search source=f5.telemetry earliest=-30s latest=now | search "system.hostname"="${dut.hostname}" | head 1`; - const searchQueryEL = () => `search source=f5.telemetry | spath testType | search testType="${SPLUNK_CONSUMER_NAME}" | search hostname="${dut.hostname}" | search testDataTimestamp="${testDataTimestamp}" | head 1`; - - if (testSetup.format.queryEventsTests) { - it(`should check for system poller data from:${dut.hostalias}`, () => new Promise((resolve) => { setTimeout(resolve, 30000); }) - .then(() => { - util.logger.info(`Splunk search query for system poller data: ${searchQuerySP()}`); - return query(searchQuerySP()); - }) - .then((data) => { - util.logger.info('Splunk response:', data); - // check we have results - const results = data.results; - assert.strictEqual(results.length > 0, true, 'No results'); - // check that the event is what we expect - const result = results[0]; - const rawData = JSON.parse(result._raw); - // validate raw data against schema - const schema = JSON.parse(fs.readFileSync(constants.DECL.SYSTEM_POLLER_SCHEMA)); - const valid = util.validateAgainstSchema(rawData, schema); - if (valid !== true) { - assert.fail(`output is not valid: ${JSON.stringify(valid.errors)}`); - } - // validate data parsed by Splunk - assert.strictEqual(result.host, dut.hostname); - assert.strictEqual(result.source, splunkSourceStr); - assert.strictEqual(result.sourcetype, splunkSourceTypeStr); - })); - } - - if (testSetup.format.metricsTests) { - it(`should check for system poller metrics from:${dut.hostalias}`, () => new Promise((resolve) => { setTimeout(resolve, 30000); }) - .then(() => { - util.logger.info(`Splunk search query for system poller data: ${searchMetrics()}`); - return query(searchMetrics()); - }) - .then((data) => { - util.logger.info('Splunk response:', data); - // check we have results - const results = data.results; - assert.strictEqual(results.length > 0, true, 'No results'); - // check that the event is what 
we expect - const metrics = results[0]['values(metric_name)']; - // check that at least some metrics exists - assert.includeMembers(metrics, [ - 'avgCycles', - 'cpu', - 'systemTimestamp', - 'tmmCpu', - 'tmmMemory' - ], 'should at least have some metrics'); - })); - } - - if (testSetup.format.eventListenerTests) { - it(`should check for event listener data from:${dut.hostalias}`, () => new Promise((resolve) => { setTimeout(resolve, 30000); }) - .then(() => { - util.logger.info(`Splunk search query for event listener data: ${searchQueryEL()}`); - return query(searchQueryEL()); - }) - .then((data) => { - util.logger.info('Splunk response:', data); - // check we have results - const results = data.results; - assert.strictEqual(results.length > 0, true, 'No results'); - // check that the event is what we expect - const result = results[0]; - const rawData = JSON.parse(result._raw); - - assert.strictEqual(rawData.testType, SPLUNK_CONSUMER_NAME); - assert.strictEqual(rawData.hostname, dut.hostname); - // validate data parsed by Splunk - assert.strictEqual(result.host, dut.hostname); - assert.strictEqual(result.source, splunkSourceStr); - assert.strictEqual(result.sourcetype, splunkSourceTypeStr); - assert.strictEqual(result.hostname, dut.hostname); - assert.strictEqual(result.testType, SPLUNK_CONSUMER_NAME); - assert.strictEqual(result.testDataTimestamp, `${testDataTimestamp}`); - })); - } - }); - }); + .catch((err) => { + bigip.logger.info('No event listener data found. 
Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }) + ))); }); + } + + describe('System Poller data', () => { + // use earliest and latests query modifiers to filter results + if (testSetup.format.queryEventsTests) { + harness.bigip.forEach((bigip) => it( + `should check Splunk for system poller data - ${bigip.name}`, + () => query(searchQuerySP(bigip)) + .then((data) => { + // check we have results + const results = data.results; + assert.isArray(results, 'should be array'); + assert.isNotEmpty(results, 'should return search results'); + + // check that the event is what we expect + const result = results[0]; + const rawData = JSON.parse(result._raw); + + // validate raw data against schema + const schema = miscUtils.readJsonFile(constants.DECL.SYSTEM_POLLER_SCHEMA); + const valid = miscUtils.validateAgainstSchema(rawData, schema); + assert.isTrue(valid, `should have valid output: ${JSON.stringify(valid.errors)}`); + + // validate data parsed by Splunk + assert.deepStrictEqual(result.host, bigip.hostname); + assert.deepStrictEqual(result.source, splunkSourceStr); + assert.deepStrictEqual(result.sourcetype, splunkSourceTypeStr); + }) + .catch((err) => { + bigip.logger.info('No system poller data found. 
Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }) + )); + } + + if (testSetup.format.metricsTests) { + harness.bigip.forEach((bigip) => it( + `should check Splunk for system poller metrics for - ${bigip.name}`, + () => query(searchMetrics(bigip)) + .then((data) => { + // check we have results + const results = data.results; + assert.isArray(results, 'should be array'); + assert.isNotEmpty(results, 'should return search results'); + + // check that the event is what we expect + const metrics = results[0]['values(metric_name)']; + // check that at least some metrics exists + assert.includeMembers(metrics, [ + 'avgCycles', + 'cpu', + 'systemTimestamp', + 'tmmCpu', + 'tmmMemory' + ], 'should at least have some metrics'); + }) + .catch((err) => { + bigip.logger.info('No system poller metrics found. Going to wait another 20sec'); + return promiseUtils.sleepAndReject(20000, err); + }) + )); + } }); } +/** + * Teardown CS + */ function teardown() { - describe('Consumer Test: Splunk - teardown', () => { - it('should remove container', () => runRemoteCmd(`docker container rm -f ${SPLUNK_CONTAINER_NAME}`)); + describe('Consumer Teardown: Splunk', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + + it('should stop and remove Splunk docker container', () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, + DOCKER_CONTAINERS.Splunk.name + )); }); } diff --git a/test/functional/consumersTests/statsdTests.js b/test/functional/consumersTests/statsdTests.js index 71f23c06..dbd6b218 100644 --- a/test/functional/consumersTests/statsdTests.js +++ b/test/functional/consumersTests/statsdTests.js @@ -6,114 +6,216 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; -const assert = require('assert'); -const fs = require('fs'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); const deepDiff = require('deep-diff'); -const util = require('../shared/util'); + const constants = require('../shared/constants'); const DEFAULT_HOSTNAME = require('../../../src/lib/constants').DEFAULT_HOSTNAME; -const dutUtils = require('../dutTests').utils; +const harnessUtils = require('../shared/harness'); +const miscUtils = require('../shared/utils/misc'); +const promiseUtils = require('../shared/utils/promise'); +const testUtils = require('../shared/testUtils'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/consumersTests/statsd + */ // module requirements const MODULE_REQUIREMENTS = { DOCKER: true }; -const DUTS = util.getHosts('BIGIP'); -const CONSUMER_HOST = util.getHosts('CONSUMER')[0]; // only expect one -const STATSD_IMAGE_NAME = `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}graphiteapp/graphite-statsd:latest`; -const STATSD_CONTAINER_NAME = 'ts_statsd_consumer'; -const STATSD_HTTP_PROTO = 'http'; -const STATSD_HTTP_PORT = 80; -const STATSD_DATA_PORT = 8125; -const STATSD_CONSUMER_NAME = 'StatsD_Consumer'; -const STATSD_PROTOCOLS = ['tcp', 'udp']; +const STATSD_DEFAULT_DATA_PORT = 8125; +const STATSD_DEFAULT_HTTP_PORT = 8080; +const STATSD_DEFAULT_HTTP_PROTO = 'http'; +const STATSD_DEFAULT_HTTP_TIMEOUT = 10000; + +const STATSD_DOCKER_CONF = { + detach: true, + image: `${constants.ARTIFACTORY_DOCKER_HUB_PREFIX}graphiteapp/graphite-statsd:1.1.9-1`, // change to 'latest' when the bug will be fixed + restart: 'always' +}; + +const STATSD_CONFIGS = { + tcp: { + CONSUMER_NAME: 'StatsD_Consumer_TCP', + DATA_PORT: 58125, + HTTP_PORT: 58080, + SERVICE_NAME: 'StatsD_TCP' + }, + udp: { + CONSUMER_NAME: 'StatsD_Consumer_UDP', + DATA_PORT: 58126, + HTTP_PORT: 58081, + SERVICE_NAME: 'StatsD_UDP' + } +}; 
+ +const DOCKER_CONTAINERS = { + StatsD_TCP: Object.assign(miscUtils.deepCopy(STATSD_DOCKER_CONF), { + env: { + GOCARBON: 1, + STATSD_INTERFACE: 'tcp' + }, + name: 'ts_statsd_consumer_tcp', + publish: { + [STATSD_CONFIGS.tcp.DATA_PORT]: `${STATSD_DEFAULT_DATA_PORT}/tcp`, + [STATSD_CONFIGS.tcp.HTTP_PORT]: STATSD_DEFAULT_HTTP_PORT + } + }), + StatsD_UDP: Object.assign(miscUtils.deepCopy(STATSD_DOCKER_CONF), { + env: { + GOCARBON: 1, + STATSD_INTERFACE: 'udp' + }, + name: 'ts_statsd_consumer_udp', + publish: { + [STATSD_CONFIGS.udp.DATA_PORT]: `${STATSD_DEFAULT_DATA_PORT}/udp`, + [STATSD_CONFIGS.udp.HTTP_PORT]: STATSD_DEFAULT_HTTP_PORT + } + }) +}; // read in example config -const DECLARATION = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); +const DECLARATION = miscUtils.readJsonFile(constants.DECL.BASIC); -function runRemoteCmd(cmd) { - return util.performRemoteCmd(CONSUMER_HOST.ip, CONSUMER_HOST.username, cmd, { password: CONSUMER_HOST.password }); -} +let SERVICES_ARE_READY; +/** + * Setup CS and DUTs + */ function setup() { - describe('Consumer Setup: Statsd - pull docker image', () => { - it('should pull container image', () => runRemoteCmd(`docker pull ${STATSD_IMAGE_NAME}`)); - }); -} + describe('Consumer Setup: StatsD', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + let CONTAINERS_STARTED; -function test() { - STATSD_PROTOCOLS.forEach((protocol) => describe(`Consumer Test: Statsd | Protocol: ${protocol}`, () => { - const containerName = `${STATSD_CONTAINER_NAME}-${protocol}`; - describe('Consumer Test: Statsd - Configure Service', () => { - it('should start container', () => { - const envVars = `-e STATSD_INTERFACE=${protocol} -e GOCARBON=1`; - const portArgs = `-p ${STATSD_HTTP_PORT}:${STATSD_HTTP_PORT} -p ${STATSD_DATA_PORT}:${STATSD_DATA_PORT}/${protocol}`; - const cmd = `docker run -d --restart=always --name ${containerName} ${portArgs} ${envVars} ${STATSD_IMAGE_NAME}`; - - // simple check to see if container already exists - return 
runRemoteCmd(`docker ps | grep ${containerName}`) - .then((data) => { - if (data) { - return Promise.resolve(); // exists, continue - } - return runRemoteCmd(cmd); - }); + Object.keys(STATSD_CONFIGS).forEach((proto) => { + cs.http.createAndSave(`${proto}Statsd`, { + port: STATSD_CONFIGS[proto].HTTP_PORT, + protocol: STATSD_DEFAULT_HTTP_PROTO, + timeout: STATSD_DEFAULT_HTTP_TIMEOUT + }); + }); + + describe('Docker container setup', () => { + before(() => { + CONTAINERS_STARTED = []; + }); + + after(() => { + CONTAINERS_STARTED = CONTAINERS_STARTED.every((v) => v); + }); + + it('should pull StatsD docker image', () => cs.docker.pull(DOCKER_CONTAINERS.StatsD_TCP.image)); + + Object.keys(STATSD_CONFIGS).forEach((proto) => { + it(`should remove pre-existing StatsD docker container (over ${proto})`, () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, DOCKER_CONTAINERS[STATSD_CONFIGS[proto].SERVICE_NAME].name + )); }); - it('should check service is up', () => { - const uri = '/render?someUnknownKey&format=json'; - const options = { - port: STATSD_HTTP_PORT, - protocol: STATSD_HTTP_PROTO + Object.keys(STATSD_CONFIGS).forEach((proto) => { + it(`should start new StatsD container (over ${proto})`, () => harnessUtils.docker.startNewContainer( + cs.docker, DOCKER_CONTAINERS[STATSD_CONFIGS[proto].SERVICE_NAME] + ) + .then(() => { + CONTAINERS_STARTED.push(true); + })); + }); + }); + + describe('Configure service', () => { + before(() => { + assert.isOk(CONTAINERS_STARTED, 'should start StatsD TCP and UDP containers!'); + SERVICES_ARE_READY = []; + }); - }; + after(() => { + SERVICES_ARE_READY = SERVICES_ARE_READY.every((v) => v); + }); - // splunk container takes about 15 seconds to come up - return new Promise((resolve) => { setTimeout(resolve, 1500); }) - .then(() => util.makeRequest(CONSUMER_HOST.ip, uri, options)) + Object.keys(STATSD_CONFIGS).forEach((proto) => { + it(`should check StatsD container is up and running (over ${proto})`, () => 
cs.http[`${proto}Statsd`].makeRequest({ + retry: { + maxTries: 10, + delay: 1000 + }, + uri: '/render?someUnknownKey&format=json' + }) .then((data) => { - util.logger.info('Statsd response:', data); - assert.strictEqual(Array.isArray(data), true); - assert.strictEqual(data.length, 0); - }); + cs.logger.info('StatsD response', { data, proto }); + assert.isArray(data); + assert.isEmpty(data); + SERVICES_ARE_READY.push(true); + })); }); }); + }); +} - describe('Consumer Test: Statsd - Configure TS', () => { - const consumerDeclaration = util.deepCopy(DECLARATION); - consumerDeclaration[STATSD_CONSUMER_NAME] = { - class: 'Telemetry_Consumer', - type: 'Statsd', - host: CONSUMER_HOST.ip, - protocol, - port: STATSD_DATA_PORT - }; - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(consumerDeclaration)) - )); +/** + * Tests for DUTs + */ +function test() { + describe('Consumer Test: StatsD', () => { + const harness = harnessUtils.getDefaultHarness(); + const cs = harness.other[0]; + const sysPollerMetricNames = {}; + + before(() => { + assert.isOk(SERVICES_ARE_READY, 'should start StatsD TCP and UDP services!'); }); - describe('Consumer Test: Statsd - Test', () => { + describe('Configure TS and generate data', () => { + let consumerDeclaration; + + before(() => { + consumerDeclaration = miscUtils.deepCopy(DECLARATION); + Object.keys(STATSD_CONFIGS).forEach((proto) => { + const config = STATSD_CONFIGS[proto]; + consumerDeclaration[config.CONSUMER_NAME] = { + class: 'Telemetry_Consumer', + type: 'Statsd', + host: cs.host.host, + protocol: proto, + port: config.DATA_PORT + }; + }); + }); + + testUtils.shouldConfigureTS(harness.bigip, () => consumerDeclaration); + }); + + describe('System Poller data', () => { /** * Note: statsd/graphite stores only counters, no strings. 
* Verification is simple - just check that at least one metric is not empty */ - // helper function to query statsd for data - const query = (searchString) => { - const uri = `/render?target=stats.gauges.${searchString}&format=json&from=-3minutes`; - const options = { - port: STATSD_HTTP_PORT, - protocol: STATSD_HTTP_PROTO - }; - - return util.makeRequest(CONSUMER_HOST.ip, uri, options) - .then((data) => Promise.resolve([searchString, data])); - }; + /** + * Query metrics from StatsD + * + * @param {string} searchString - search string + * + * @returns {Promise>} resolved with search results + */ + const queryStatsD = (proto, searchString) => cs.http[`${proto}Statsd`].makeRequest({ + retry: { + maxTries: 10, + delay: 200 + }, + uri: `/render?target=stats.gauges.${searchString}&format=json&from=-3minutes` + }) + .then((data) => [searchString, data]); + /** + * Remove metrics from data + * + * @param {Object} data - data + */ const stripMetrics = (data) => { Object.keys(data).forEach((item) => { if (Number.isInteger(data[item])) { @@ -124,8 +226,15 @@ function test() { }); }; - const getMetricsName = (data) => { - const copyData = JSON.parse(JSON.stringify(data)); + /** + * Get metric names from data + * + * @param {Object} data - data + * + * @returns {Array} array of metric names + */ + const getMetricNames = (data) => { + const copyData = miscUtils.deepCopy(data); stripMetrics(copyData); const diff = deepDiff(copyData, data) || []; @@ -140,81 +249,69 @@ function test() { ].concat(item.path).map((i) => i.replace(/\.|\/|:/g, '-')).join('.')); }; - const verifyMetrics = (metrics) => { - let idx = 0; - let hasIndexed = false; + harness.bigip.forEach((bigip) => it( + `should fetch system poller data via debug endpoint - ${bigip.name}`, + () => bigip.telemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME) + .then((data) => { + sysPollerMetricNames[bigip.hostname] = getMetricNames(data[0]); + }) + )); - const getNextMetrics = () => { - const promises = []; + 
Object.keys(STATSD_CONFIGS).forEach( + (proto) => harness.bigip.forEach((bigip) => it( + `should check StatsD for system poller data - ${bigip.name} (over ${proto})`, + () => { + const metricNames = sysPollerMetricNames[bigip.hostname]; + let metricsFound = false; - for (let i = 0; i < 4 && idx < metrics.length; i += 1) { - promises.push(query(metrics[idx])); - idx += 1; - } - return Promise.all(promises) - .then((data) => { - data.forEach((item) => { - /** - * item = [metricName, data] - * data is array of objects like { targets: {}, tags: {}, datapoints: []} - */ - if (Array.isArray(item[1]) && item[1].length > 0 - && item[1][0].datapoints && item[1][0].datapoints.length > 0) { - util.logger.info(`Metic ${item[0]}: `, item[1]); - hasIndexed = true; + assert.isNotEmpty(metricNames, 'should have metric names from system poller data'); + + return promiseUtils.loopForEach( + metricNames, + (metricName, idx, arr, breakCb) => queryStatsD(proto, metricName) + .then((queryRet) => { + /** + * queryRet = [metricName, data] + * data is array of objects like { targets: {}, tags: {}, datapoints: []} + */ + const data = queryRet[1]; + if (Array.isArray(data) && data.length > 0 + && data[0].datapoints && data[0].datapoints.length > 0) { + bigip.logger.info('StatsD metric was found', { idx, queryRet }); + metricsFound = true; + return breakCb(); + } + return Promise.resolve(); + }) + ) + .then(() => { + if (metricsFound) { + return Promise.resolve(); } + bigip.logger.info('Waiting for data to be indexed...'); + // more sleep time for system poller data to be indexed + return promiseUtils.sleepAndReject(20000, 'should have metrics indexed from system poller data'); }); - if (hasIndexed) { - return Promise.resolve(); - } - if (idx < metrics.length) { - return getNextMetrics(); - } - /** - * Reasons for retry: - * - indexing is still in process - * - system poller not sent data yet - * Sleep for 30 second(s) and return Promise.reject to allow retry - */ - util.logger.info('Waiting 
for data to be indexed...'); - return new Promise((resolveTimer) => { setTimeout(resolveTimer, 30000); }) - .then(() => Promise.reject(new Error('Metrics are empty / not indexed'))); - }); - }; - return getNextMetrics(); - }; - - // end helper function - - const sysPollerMetricsData = {}; - - it('should fetch system poller data via debug endpoint from DUTs', () => dutUtils.getSystemPollersData((hostObj, data) => { - sysPollerMetricsData[hostObj.hostname] = getMetricsName(data[0]); - })); - - DUTS.forEach((dut) => { - // at first we need to retrieve list of metrics to poll - it(`should check for system poller data from:${dut.hostalias}`, () => { - const metrics = sysPollerMetricsData[dut.hostname]; - if (!metrics) { - throw new Error(`No System Poller Metrics data for ${dut.hostalias} !`); } - // all metrics should be non-empty array - it means they were added to index - return verifyMetrics(metrics); - }); - }); - }); - - // Just stop the container, so we can reuse ports on next run - describe('Consumer Test: Statsd - Stop container', () => { - it('should remove container(s)', () => runRemoteCmd(`docker stop ${containerName}`)); + )) + ); }); - })); + }); } +/** + * Teardown CS + */ function teardown() { - describe('Consumer Test: Statsd - teardown', () => { - STATSD_PROTOCOLS.forEach((protocol) => it(`should remove ${protocol} container(s)`, () => runRemoteCmd(`docker container rm -f ${STATSD_CONTAINER_NAME}-${protocol}`))); + describe('Consumer Teardown: StatsD', () => { + const cs = harnessUtils.getDefaultHarness().other[0]; + + Object.keys(STATSD_CONFIGS).forEach((proto) => it( + `should stop and remove StatsD docker container (${proto} data transport)`, + () => harnessUtils.docker.stopAndRemoveContainer( + cs.docker, DOCKER_CONTAINERS[STATSD_CONFIGS[proto].SERVICE_NAME].name + ) + )); }); } diff --git a/test/functional/deployment/example_harness_facts.json b/test/functional/deployment/example_harness_facts.json index a6ab34f0..3aa86a17 100644 --- 
a/test/functional/deployment/example_harness_facts.json +++ b/test/functional/deployment/example_harness_facts.json @@ -1,6 +1,6 @@ [ { - "admin_ip": "10.144.130.132", + "admin_ip": "192.168.0.1", "ssh_user": { "username": "root", "password": "default" @@ -21,7 +21,7 @@ "version": "13.1.1.4" }, "f5_base_conf_done": true, - "f5_license_key": "C6587-05956-55278-35216-5256455", + "f5_license_key": "XXXXX-ZZZZZ-AAAAA-BBBBB-FFFFFFF", "f5_hostname": "ts_test_13_1-bigip.localhost.localdomain", "f5_int_vlan_name": "internal", "f5_int_vlan_interface": "1.1", @@ -44,17 +44,17 @@ "is_f5_device": true, "network": { "mgmt": { - "ip": "10.144.130.132", - "mac": "fa:16:3e:c3:c8:00" + "ip": "192.168.0.1", + "mac": "AA:BB:CC:DD:EE:F0" }, "network1": { "ip": "10.2.0.3", - "mac": "fa:16:3e:32:e2:c4" + "mac": "AA:BB:CC:DD:EE:F1" } } }, { - "admin_ip": "10.144.130.27", + "admin_ip": "192.168.0.2", "ssh_user": { "username": "root", "password": "default" @@ -67,12 +67,13 @@ "is_f5_device": false, "network": { "mgmt": { - "ip": "10.144.130.27", - "mac": "fa:16:3e:40:f4:26" + "ip": "192.168.0.2", + "mac": "AA:BB:CC:DD:EE:F2" }, "network1": { "ip": "10.2.0.1", - "mac": "fa:16:3e:22:e1:eb" + "mac": "AA:BB:CC:DD:EE:F3" } } - }] \ No newline at end of file + } +] \ No newline at end of file diff --git a/test/functional/dutTests.js b/test/functional/dutTests.js index 7e09e0d0..94ae33be 100644 --- a/test/functional/dutTests.js +++ b/test/functional/dutTests.js @@ -6,389 +6,132 @@ * the software product on devcentral.f5.com. 
*/ -// this object not passed with lambdas, which mocha uses - 'use strict'; const chai = require('chai'); const chaiAsPromised = require('chai-as-promised'); const fs = require('fs'); -const net = require('net'); +const pathUtil = require('path'); const readline = require('readline'); -const util = require('./shared/util'); + const constants = require('./shared/constants'); const DEFAULT_UNNAMED_NAMESPACE = require('../../src/lib/constants').DEFAULT_UNNAMED_NAMESPACE; +const harnessUtils = require('./shared/harness'); +const logger = require('./shared/utils/logger').getChild('dutTests'); +const miscUtils = require('./shared/utils/misc'); +const promiseUtils = require('./shared/utils/promise'); +const testUtils = require('./shared/testUtils'); chai.use(chaiAsPromised); const assert = chai.assert; -const duts = util.getHosts('BIGIP'); -const packageDetails = util.getPackageDetails(); -const basicDeclaration = JSON.parse(fs.readFileSync(constants.DECL.BASIC)); -const namespaceDeclaration = JSON.parse(fs.readFileSync(constants.DECL.BASIC_NAMESPACE)); - -/** - * Post declaration to TS on DUT - * - * @param {Object} dut - DUT (device under test) object - * @param {String} dut.ip - host - * @param {String} dut.user - username - * @param {String} dut.password - password - * @param {String} dut.hostname - hostname - * @param {Object} declaration - declaration to send to TS - * - * @returns {Object} Promise resolved when request succeed - */ -function postDeclarationToDUT(dut, declaration) { - util.logger.info(`Going to send following declaration to host ${dut.hostname}`, declaration); - return util.postDeclaration(dut, declaration) - .then((data) => { - assert.strictEqual(data.message, 'success'); - }); -} - -/** - * Post declaration to TS on DUTs - * - * @param {Function} callback - callback, should return declaration - * - * @returns {Object} Promise resolved when all requests succeed - */ -function postDeclarationToDUTs(callback) { - return Promise.all(duts.map((dut) => 
postDeclarationToDUT(dut, callback(dut)))); -} - -/** - * Send message(s) to TS Event Listener - * - * @param {Object} dut - DUT (device under test) object - * @param {String} dut.ip - host - * @param {String} message - message to send - * @param {Object} [opts] - options - * @param {Integer} [opts.numOfMsg] - number of messages to send, by default 15 - * @param {Integer} [opts.delay] - delay (in ms) before sending next message, by default 4000ms - * - * @returns {Object} Promise resolved when all messages were sent to Event Listener - */ -function sendDataToEventListener(dut, message, opts) { - opts = opts || {}; - opts.numOfMsg = typeof opts.numOfMsg === 'undefined' ? 15 : opts.numOfMsg; - opts.delay = typeof opts.delay === 'undefined' ? 4000 : opts.delay; - - util.logger.info(`Sending ${opts.numOfMsg} messages to Event Listener ${dut.ip}`); - return new Promise((resolve, reject) => { - function sendData(i) { - if (i >= opts.numOfMsg) { - resolve(); - return; - } - new Promise((timeoutResolve) => { setTimeout(timeoutResolve, opts.delay); }) - .then(() => util.sendEvent(dut.ip, message)) - .then(() => sendData(i + 1)) - .catch(reject); - } - sendData(0); - }); -} - -/** - * Send data to TS Event Listener on DUTs - * - * @param {Function} callback - callback, should return data - * @param {Number} [numOfMsg] - number of messages to send, by default 15 - * @param {Number} [delay] - delay (in ms) before sending next message, by default 4000ms - * - * @returns {Object} Promise resolved when all messages were sent to Event Listeners - */ -function sendDataToEventListeners(callback, numOfMsg, delay) { - return Promise.all(duts.map((dut) => sendDataToEventListener(dut, callback(dut), { numOfMsg, delay }))); -} - /** - * Query a given PullConsumer for data. 
- * - * @param {Object} dut - Device Under Test object - * @param {String} pullConsumerName - Name of the configured Pull Consumer - * @param {String} [options.namespace] - Optional namespace name - * @param {Boolean} [options.rawResponse] - Whether or not to return full HTTP response, or just the HTTP body - * - * @returns {Object} Promise resolved with HTTP body, or full HTTP response + * @module test/functional/dutTests */ -function getPullConsumerData(dut, pullConsumerName, options) { - options = options || {}; - const namespacePath = options.namespace ? `/namespace/${options.namespace}` : ''; - const rawResponse = options.rawResponse; - const uri = `${constants.BASE_ILX_URI}${namespacePath}/pullconsumer/${pullConsumerName}`; - const host = dut.ip; - const user = dut.username; - const password = dut.password; - - return util.getAuthToken(host, user, password) - .then((data) => { - const postOptions = { - method: 'GET', - headers: { - 'x-f5-auth-token': data.token - }, - rawResponse - }; - return util.makeRequest(host, uri, postOptions); - }); -} /** - * Fetch System Poller data from DUT - * - * @param {Object} dut - DUT (device under test) object - * @param {String} dut.ip - host - * @param {String} dut.user - username - * @param {String} dut.password - password - * - * @returns {Object} Promise resolved when request succeed + * Setup DUTs */ -function getSystemPollerData(dut, sysPollerName) { - const uri = `${constants.BASE_ILX_URI}/systempoller/${sysPollerName}`; - const host = dut.ip; - const user = dut.username; - const password = dut.password; - - return util.getAuthToken(host, user, password) - .then((data) => { - const postOptions = { - method: 'GET', - headers: { - 'x-f5-auth-token': data.token - } - }; - return util.makeRequest(host, uri, postOptions); - }); -} - -/** - * Fetch System Poller data from DUTs - * - * @param {Function} callback - callback(hostObj, data) - * - * @returns {Object} Promise resolved when all requests succeed - */ -function 
getSystemPollersData(callback) { - return Promise.all(duts.map( - (dut) => getSystemPollerData(dut, constants.DECL.SYSTEM_NAME) - .then((data) => callback(dut, data)) - )); -} - -/** - * Uninstall all TS packages - * - * @param {String} host - host - * @param {String} authToken - auth token - * @param {Object} options - request options - * - * @returns Promise resolved once TS packages removed from F5 device - */ -function uninstallAllTSpackages(host, authToken, options) { - const uri = `${constants.BASE_ILX_URI}/info`; - let error; - let data; - - return util.getInstalledPackages(host, authToken) - .then((installedPackages) => Promise.all(installedPackages - .filter((pkg) => pkg.packageName.includes('f5-telemetry')) - .map((pkg) => util.uninstallPackage(host, authToken, pkg.packageName)))) - .then(() => util.makeRequest(host, uri, options)) - .then((resp) => { - data = resp; - }) - .catch((err) => { - error = err; - }) - .then(() => { - if (data) { - throw new Error(`Unexpected response from ${uri}: ${JSON.stringify(data)}`); - } - if (error && error.statusCode !== 404) { - throw new Error(`Expected HTTP 404 Not Found. 
Actual error ${error}`); - } - }); -} - function setup() { - // get package details - const packageFile = packageDetails.name; - const packagePath = packageDetails.path; - util.logger.info(`Package File to install on DUT: ${packageFile} [${packagePath}]`); - - // create logs directory - used later - util.createDir(constants.ARTIFACTS_DIR); - - // account for 1+ DUTs - duts.forEach((item) => { - describe(`DUT setup - ${item.hostname}`, () => { - const host = item.ip; - const user = item.username; - const password = item.password; - - let authToken = null; - let options = {}; - - before(() => util.getAuthToken(host, user, password) - .then((data) => { - authToken = data.token; - })); - - beforeEach(() => { - options = { - headers: { - 'x-f5-auth-token': authToken - } - }; - }); - - it('should remove pre-existing TS declaration', () => { - const uri = `${constants.BASE_ILX_URI}/declare`; - const postOptions = { - method: 'POST', - headers: options.headers, - body: { - class: 'Telemetry' - } - }; - let data; - let error; - - return util.makeRequest(host, uri, postOptions) - .then((resp) => { - data = resp; - util.logger.info('Existing declaration:', { host, data }); - }) - .catch((err) => { - error = err; - }) - .then(() => { - // silently skip error - probably no TS package installed - if (error) { - return Promise.resolve(); - } - assert.strictEqual(data.message, 'success'); - // wait for 5 secs while declaration will be saved to storage - return util.sleep(5000); - }); - }); - - it('should remove pre-existing TS packages', () => uninstallAllTSpackages(host, authToken, options)); - - it('should erase restnoded log', () => { - const uri = '/mgmt/tm/util/bash'; - const postOptions = { - method: 'POST', - headers: options.headers, - body: JSON.stringify({ - command: 'run', - utilCmdArgs: '-c "> /var/log/restnoded/restnoded.log"' - }) - }; - return util.makeRequest(host, uri, postOptions); - }); - - it('should install package', () => { - const fullPath = 
`${packagePath}/${packageFile}`; - return util.installPackage(host, authToken, fullPath) - .then(() => {}); - }); - - it('should verify installation', () => { - const uri = `${constants.BASE_ILX_URI}/info`; - - return new Promise((resolve) => { setTimeout(resolve, 5000); }) - .then(() => util.makeRequest(host, uri, options)) - .then((data) => { - data = data || {}; - util.logger.info(`${uri} response`, { host, data }); - assert.notStrictEqual(data.version, undefined); - }) - .catch((err) => { - util.logger.error(`Unable to verify package installation due following error: ${err}`); - return Promise.reject(err); - }); - }); - - it('should configure firewall rules', () => { - // BIGIP 14.1+ only: - // to reach listener via mgmt IP might require allowing through host fw - // using below command (or similar): - // tmsh modify security firewall management-ip-rules rules replace-all-with { telemetry - // { place-before first ip-protocol tcp destination { - // ports replace-all-with { 6514 } } action accept } } - const uri = '/mgmt/tm/security/firewall/management-ip-rules/rules'; - const ruleName = 'telemetry'; - let passOnError = false; - - return util.makeRequest(host, uri, options) - .catch((error) => { - // older versions of BIG-IP do not have security enabled by default - const errMsg = error.message; - // just info the user that something unexpected happened but still trying to proceed - if (!errMsg.includes('must be licensed')) { - util.logger.error( - `Unable to configure management-ip rules, continue with current config. 
Error message: ${error.message}`, - { host } - ); - } - passOnError = true; - return Promise.reject(error); - }) - .then(() => util.makeRequest(host, uri, options)) - .then((data) => { - // check if rule already exists - if (data.items.some((i) => i.name === ruleName) === true) { - // exists, delete the rule - const deleteOptions = { - method: 'DELETE', - headers: options.headers - }; - return util.makeRequest(host, `${uri}/${ruleName}`, deleteOptions); - } - return Promise.resolve(); + const harness = harnessUtils.getDefaultHarness(); + const tsRPMInfo = miscUtils.getPackageDetails(); + logger.info('RPM to install on DUT(s):', { tsRPMInfo }); + + harness.bigip.forEach((bigip) => { + describe(`DUT setup - ${bigip.name}`, () => { + testUtils.shouldRemovePreExistingTSDeclaration(bigip); + testUtils.shouldRemovePreExistingTSPackage(bigip); + + it('should stop restnoded', () => bigip.ssh.default.exec('bigstart stop restnoded')); + + it('should erase restnoded log(s)', () => bigip.ssh.default.exec( + `rm -f ${pathUtil.join(constants.BIGIP.RESTNODED.LOGS_DIR, '*')}` + )); + + it('should start restnoded', () => bigip.ssh.default.exec('bigstart start restnoded')); + + testUtils.shouldInstallTSPackage(bigip, () => tsRPMInfo); + testUtils.shouldVerifyTSPackageInstallation(bigip); + + ['tcp', 'udp'].forEach((proto) => it( + `should configure firewall rules - ${proto} protocol`, + () => { + // BIGIP 14.1+ only: + // to reach listener via mgmt IP might require allowing through host fw + // using below command (or similar): + // tmsh modify security firewall management-ip-rules rules replace-all-with { telemetry + // { place-before first ip-protocol tcp destination { + // ports replace-all-with { 6514 } } action accept } } + const uri = '/mgmt/tm/security/firewall/management-ip-rules/rules'; + const ruleName = `telemetry-${proto}`; + let passOnError = false; + + return bigip.icontrol.default.makeRequestWithAuth({ + method: 'GET', + uri }) - .then(() => { - // create rule - const 
body = JSON.stringify({ - name: ruleName, - 'place-before': 'first', - action: 'accept', - ipProtocol: 'tcp', - destination: { - ports: [ - constants.EVENT_LISTENER_DEFAULT_PORT, - constants.EVENT_LISTENER_SECONDARY_PORT, - constants.EVENT_LISTENER_NAMESPACE_PORT, - constants.EVENT_LISTENER_NAMESPACE_SECONDARY_PORT - ].map((port) => ({ name: String(port) })) + .catch((error) => { + // older versions of BIG-IP do not have security enabled by default + const errMsg = error.message; + // just info the user that something unexpected happened but still trying to proceed + if (!errMsg.includes('must be licensed')) { + bigip.logger.error('Unable to configure management-ip rules, continue with current config:', error); + } + passOnError = true; + return Promise.reject(error); + }) + .then(() => bigip.icontrol.default.makeRequestWithAuth({ + method: 'GET', + uri + })) + .then((data) => { + // check if rule already exists + if (data.items.some((i) => i.name === ruleName) === true) { + // exists, delete the rule + return bigip.icontrol.default.makeRequestWithAuth({ + method: 'DELETE', + uri: `${uri}/${ruleName}` + }); } - }); - const postOptions = { - method: 'POST', - headers: options.headers, - body - }; - return util.makeRequest(host, uri, postOptions); - }) - .catch((err) => { - if (passOnError === true) { return Promise.resolve(); - } - return Promise.reject(err); - }); - }); + }) + .then(() => bigip.icontrol.default.makeRequestWithAuth({ + body: { + name: ruleName, + 'place-before': 'first', + action: 'accept', + ipProtocol: proto, + destination: { + ports: [ + constants.TELEMETRY.LISTENER.PORT.DEFAULT, + constants.TELEMETRY.LISTENER.PORT.SECONDARY, + constants.TELEMETRY.LISTENER.PORT.NAMESPACE, + constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY + ].map((port) => ({ name: String(port) })) + } + }, + json: true, + method: 'POST', + uri + })) + .catch((err) => (passOnError === true + ? 
Promise.resolve() + : Promise.reject(err))); + } + )); }); }); } +/** + * Tests for DUTs + */ function test() { + const harness = harnessUtils.getDefaultHarness(); const tests = [ { - name: 'basic declaration - default (no namespace)' + name: 'basic declaration - default (no namespace)', + eventListenerTests: true }, { name: 'basic declaration with namespace', @@ -407,23 +150,28 @@ function test() { }, { name: 'mixed declaration (default and namespace), verify default by "f5telemetry_default"', - namespace: DEFAULT_UNNAMED_NAMESPACE + namespace: DEFAULT_UNNAMED_NAMESPACE, + eventListenerTests: true }, { name: 'basic declaration - namespace endpoint', namespace: constants.DECL.NAMESPACE_NAME, + eventListenerTests: true, useNamespaceDeclare: true } ]; + const basicDeclaration = miscUtils.readJsonFile(constants.DECL.BASIC); + const namespaceDeclaration = miscUtils.readJsonFile(constants.DECL.BASIC_NAMESPACE); + function getDeclToUse(testSetup) { - let declaration = util.deepCopy(basicDeclaration); + let declaration = miscUtils.deepCopy(basicDeclaration); if (testSetup.name.startsWith('mixed')) { - declaration.My_Namespace = util.deepCopy(namespaceDeclaration.My_Namespace); + declaration.My_Namespace = miscUtils.deepCopy(namespaceDeclaration.My_Namespace); } else if (testSetup.useNamespaceDeclare) { - declaration = util.deepCopy(namespaceDeclaration.My_Namespace); + declaration = miscUtils.deepCopy(namespaceDeclaration.My_Namespace); } else if (testSetup.namespace && testSetup.namespace !== DEFAULT_UNNAMED_NAMESPACE) { - declaration = util.deepCopy(namespaceDeclaration); + declaration = miscUtils.deepCopy(namespaceDeclaration); } return declaration; } @@ -431,7 +179,7 @@ function test() { // Selectively skip tests if the testSetup uses Namespaces // Tests that validate more complex SystemPoller logic can be skipped when only testing Namespace logic function ifNoNamespaceIt(title, testSetup, testFunc) { - return testSetup.namespace ? 
it.skip(title, testFunc) : it(title, testFunc); + return testSetup.namespace ? () => {} : it(title, testFunc); } function searchCipherTexts(data, cb) { @@ -459,7 +207,7 @@ function test() { let secretsFound = 0; searchCipherTexts(data, (cipherText) => { secretsFound += 1; - assert.strictEqual(cipherText.startsWith('$M'), true, 'cipherText should start with $M$'); + assert.isTrue(cipherText.startsWith('$M'), true, 'cipherText should start with $M$'); }); assert.notStrictEqual(secretsFound, 0, 'Expected at least 1 cipherText field'); } @@ -468,372 +216,476 @@ function test() { searchCipherTexts(data, () => 'replacedSecret'); } - tests.forEach((testSetup) => { - describe(`${testSetup.name}`, () => { - const namespacePath = testSetup.namespace ? `/namespace/${testSetup.namespace}` : ''; + harness.bigip.forEach((bigip) => { + describe(`DUT test - ${bigip.name}`, () => { + describe('Event Listener tests', () => { + it('should ensure event listener is up', () => { + const allListenerPorts = [ + constants.TELEMETRY.LISTENER.PORT.DEFAULT, + constants.TELEMETRY.LISTENER.PORT.SECONDARY, + constants.TELEMETRY.LISTENER.PORT.NAMESPACE, + constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY + ]; + + /** + * Connector to Event Listener + * + * @param {number} port - port number + * @param {boolean} [retry = false] - retry on fail + * + * @returns {Promise} resolved once successfully connected + */ + function connectToListener(port, retry) { + return bigip.tcp.ping(port, { + retry: { + // re-try 10 times for opened ports and 2 times for closed + // (in case of random conn error) + maxTries: retry ? 10 : 2, + delay: retry ? 
300 : 50 + } + }); + } - duts.forEach((item) => { - describe(`DUT test - ${item.hostname}`, () => { - const host = item.ip; - const user = item.username; - const password = item.password; + /** + * Check Event Listener ports + * + * @param {Object} ports - ports to check + * @param {Array} [ports.closed] - closed ports + * @param {Array} [ports.opened] - opened ports + * + * @returns {Promise} resolved once all conditions satisfied + */ + function checkListenerPorts(ports) { + bigip.logger.info('Checking following event listener ports', { ports }); + + const promises = { + closed: promiseUtils.allSettled((ports.closed || []) + .map((port) => connectToListener(port))), + opened: promiseUtils.allSettled((ports.opened || []) + .map((port) => connectToListener(port, true))) + }; + return Promise.all([promises.closed, promises.opened]) + .then((results) => { + const closed = results[0]; + const opened = results[1]; + + const notClosed = closed + .map((ret, idx) => [ret.status === 'fulfilled', ports.closed[idx]]) + .filter((ret) => ret[0]) + .map((ret) => ret[1]); + if (notClosed.length > 0) { + throw new Error(`Port(s) ${notClosed.join(', ')} should be closed`); + } - let authToken = null; - let options = {}; + const notOpened = opened + .map((ret, idx) => [ret.status === 'rejected', ports.closed[idx]]) + .filter((ret) => ret[0]) + .map((ret) => ret[1]); + if (notOpened.length > 0) { + throw new Error(`Port(s) ${notOpened.join(', ')} should be opened`); + } + }); + } - before(() => util.getAuthToken(host, user, password) - .then((data) => { - authToken = data.token; - })); + /** + * Find Event Listeners in declaration + * + * @param {Object} obj - declaration + * @param {function} cb - callbacks + */ + const findListeners = (obj, cb) => { + if (typeof obj === 'object') { + if (obj.class === 'Telemetry_Listener') { + cb(obj); + } else { + Object.keys(obj).forEach((key) => findListeners(obj[key], cb)); + } + } + }; + + /** + * Get expected state for each port + * + * 
@param {Object} decl - declaration + * + * @returns {{opened: Array, closed: Array}} + */ + const expectedPortStates = (decl) => { + const ports = { closed: [], opened: [] }; + findListeners(decl, (listener) => { + const enabled = typeof listener.enable === 'undefined' ? true : listener.enable; + (enabled ? ports.opened : ports.closed) + .push(listener.port || constants.TELEMETRY.LISTENER.PORT.DEFAULT); + }); + allListenerPorts.forEach((port) => { + if (ports.opened.indexOf(port) === -1) { + ports.closed.push(port); + } + }); + // remove dups + ports.opened = ports.opened.filter((port, idx) => ports.opened.indexOf(port) === idx); + // remove dups and opened ports + ports.closed = ports.closed.filter((port, idx) => ports.closed.indexOf(port) === idx + && ports.opened.indexOf(port) === -1); + + assert.sameMembers( + [].concat(ports.closed, ports.opened), + allListenerPorts, + 'should use all expected ports' + ); + assert.deepStrictEqual( + ports.closed.filter((port) => ports.opened.indexOf(port) !== -1), + [], + 'should use different opened and closed ports' + ); + return ports; + }; + + /** + * Create Telemetry_Listener object + * + * @param {integer} port - port + * + * @returns {Object} listener object + */ + const createListener = (port, enable) => ({ + class: 'Telemetry_Listener', + enable: typeof enable === 'undefined' ? 
true : enable, + port, + trace: false + }); - beforeEach(() => { - options = { - headers: { - 'x-f5-auth-token': authToken + /** + * Run sub-test + * + * @param {string} name - sub-test name + * @param {function} f - func tp run + * + * @returns {Promise} + */ + const runSubTest = (function (name, declaration, namespace) { + this.subTestID += 1; + // keep 'promiseUtils.loopForEach' in case we will need to test posting same declaration twice + return promiseUtils.loopForEach([1], (item) => { + bigip.logger.info(`SubTest ${this.subTestID}.${item}: ${name}`); + + let ts = bigip.telemetry; + if (namespace) { + ts = ts.toNamespace(namespace); } - }; + return ts.declare(miscUtils.deepCopy(declaration)) + .then((data) => { + bigip.logger.info('Declaration POST results:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + return bigip.telemetry.getDeclaration(); + }) + .then((data) => { + bigip.logger.info('Declaration GET results:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + + return assert.isFulfilled( + checkListenerPorts(expectedPortStates(data)), + 'should have opened and closed ports as expected' + ); + }); + }); + }).bind({ + subTestID: 0 }); + return Promise.resolve() + .then(() => runSubTest('Empty declaration in default namespace', { + class: 'Telemetry' + })) + .then(() => runSubTest('Declaration with 1 enabled listener in default namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT) + })) + .then(() => runSubTest('Declaration with 2 enabled listeners (different ports) in default namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY) + })) + .then(() => runSubTest('Declaration with 2 enabled listeners (same ports) in default namespace', { + class: 'Telemetry', + Listener_1: 
createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT) + })) + .then(() => runSubTest('Declaration with 1 enabled and 1 disabled listeners (same ports) in default namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT, false) + })) + .then(() => runSubTest('Declaration with 1 enabled and 1 disabled listeners (different ports) in default namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY, false) + })) + .then(() => runSubTest('Empty declaration in Namespace', { + class: 'Telemetry_Namespace' + }, 'Namespace')) + .then(() => runSubTest('Declaration with 1 enabled listener in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE) + }, 'Namespace')) + .then(() => runSubTest('Declaration with 2 enabled listeners (different ports) in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY) + }, 'Namespace')) + .then(() => runSubTest('Declaration with 2 enabled listeners (same ports) in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY) + }, 'Namespace')) + .then(() => runSubTest('Declaration with 1 enabled and 1 disabled listeners (same ports) in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY, false) + }, 'Namespace')) + .then(() 
=> runSubTest('Declaration with 1 enabled and 1 disabled listeners (different ports) in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY, false) + }, 'Namespace')) + .then(() => runSubTest('Declaration with 1 enabled and 1 disabled listeners (same ports as in default namespace) in Namespace', { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY, false) + }, 'Namespace')) + .then(() => runSubTest('Declaration with 2 enabled listeners (different ports) in each namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY), + Namespace: { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.NAMESPACE_SECONDARY) + } + })) + .then(() => runSubTest('Declaration with 2 enabled listeners (same ports) in each namespace', { + class: 'Telemetry', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY), + Namespace: { + class: 'Telemetry_Namespace', + Listener_1: createListener(constants.TELEMETRY.LISTENER.PORT.DEFAULT), + Listener_2: createListener(constants.TELEMETRY.LISTENER.PORT.SECONDARY) + } + })) + .then(() => runSubTest('Empty declaration in Namespace', { + class: 'Telemetry_Namespace' + }, 'Namespace')) + .then(() => runSubTest('Empty declaration in default namespace', { + class: 'Telemetry' + })); + }); + }); + + describe('System Poller tests', () => { + it('should fetch and process SNMP metrics', () => promiseUtils.sleep(500) + .then(() => 
miscUtils.readJsonFile(constants.DECL.SNMP_METRICS, true)) + .then((decl) => bigip.telemetry.declare(decl)) + .then((data) => { + bigip.logger.info('POST declaration:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + // wait 5s in case if config was not applied yet + return promiseUtils.sleep(500); + }) + .then(() => bigip.telemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be array'); + assert.isNotEmpty(data, 'should have at least one element'); + // verify that 'system' key and child objects are included + data = data[0]; + + const snmpName = 'hrDeviceStatus.196608'; + + assert.isString(data.hrDeviceStatusOrigin[snmpName], 'should not convert SNMP enum to metric'); + assert.isNotEmpty(data.hrDeviceStatusOrigin[snmpName], 'should fetch SNMP enum value'); + + assert.isString(data.hrDeviceStatusOriginWithOptions[snmpName], 'should not convert SNMP enum to metric'); + assert.isNotEmpty(data.hrDeviceStatusOriginWithOptions[snmpName], 'should fetch SNMP enum value'); + + assert.isNumber(data.hrDeviceStatusAsMetric[snmpName], 'should convert SNMP enum to metric'); + })); + }); + + tests.forEach((testSetup) => { + describe(`${testSetup.name}`, () => { + const defaultTelemetry = bigip.telemetry; + const namespaceTelemetry = testSetup.namespace + ? bigip.telemetry.toNamespace(testSetup.namespace) + : defaultTelemetry; + describe('basic checks', () => { it('should post same configuration twice and get it after', () => { - const uri = testSetup.useNamespaceDeclare ? `${constants.BASE_ILX_URI}${namespacePath}/declare` : `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: getDeclToUse(testSetup) - }); + const ts = testSetup.useNamespaceDeclare + ? 
namespaceTelemetry + : defaultTelemetry; + let postResponses = []; // wait 2s to buffer consecutive POSTs - return util.sleep(2000) - .then(() => util.makeRequest(host, uri, util.deepCopy(postOptions))) + return promiseUtils.sleep(500) + .then(() => ts.declare(getDeclToUse(testSetup))) .then((data) => { - util.logger.info('POST request #1: Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); + bigip.logger.info('POST request #1: Declaration response:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); checkPassphraseObject(data); postResponses.push(data); // wait for 5 secs while declaration will be applied and saved to storage - return util.sleep(5000); + return promiseUtils.sleep(500); }) - .then(() => util.makeRequest(host, uri, util.deepCopy(postOptions))) + .then(() => ts.declare(getDeclToUse(testSetup))) .then((data) => { - util.logger.info('POST request #2: Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); + bigip.logger.info('POST request #2: Declaration response:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); checkPassphraseObject(data); postResponses.push(data); - // wait for 5 secs while declaration will be applied and saved to storage - return util.sleep(5000); + return promiseUtils.sleep(500); }) - .then(() => util.makeRequest(host, uri, util.deepCopy(options))) + .then(() => ts.getDeclaration()) .then((data) => { - util.logger.info('GET request: Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); + bigip.logger.info('GET request: Declaration response:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); checkPassphraseObject(data); postResponses.push(data); // compare GET to recent POST - assert.deepStrictEqual(postResponses[2], postResponses[1]); + assert.deepStrictEqual(postResponses[2], 
postResponses[1], 'should return same declaration every time'); // lest compare first POST to second POST (only one difference is secrets) postResponses = postResponses.map(removeCipherTexts); - assert.deepStrictEqual(postResponses[0], postResponses[1]); + assert.deepStrictEqual(postResponses[0], postResponses[1], 'should return same declaration every time'); }) .then(() => { if (testSetup.useNamespaceDeclare) { - util.logger.info('Additional test for namespace endpoint - verify full declaration'); - const url = `${constants.BASE_ILX_URI}/declare`; - - return util.makeRequest(host, url, util.deepCopy(options)) + bigip.logger.info('Additional test for namespace endpoint - verify full declaration'); + return defaultTelemetry.getDeclaration() .then((data) => { - util.logger.info('GET request: Declaration response', { host, data }); - assert.strictEqual(data.message, 'success'); + bigip.logger.info('GET request: Declaration response', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); // verify merged decl - assert.isTrue(typeof data.declaration[constants.DECL.NAMESPACE_NAME] !== 'undefined'); // named namespace - assert.isTrue(typeof data.declaration[constants.DECL.SYSTEM_NAME] !== 'undefined'); // default namespace + assert.isTrue(typeof data.declaration[constants.DECL.NAMESPACE_NAME] !== 'undefined', 'should return expected declaration'); // named namespace + assert.isTrue(typeof data.declaration[constants.DECL.SYSTEM_NAME] !== 'undefined', 'should return expected declaration'); // default namespace }); } return Promise.resolve(); }); }); - it('should get response from systempoller endpoint', () => { - const uri = `${constants.BASE_ILX_URI}${namespacePath}/systempoller/${constants.DECL.SYSTEM_NAME}`; - // wait 500ms in case if config was not applied yet - return util.sleep(500) - .then(() => util.makeRequest(host, uri, options)) - .then((data) => { - data = data || []; - util.logger.info(`SystemPoller response 
(${uri}):`, { host, data }); - assert.strictEqual(data.length, 1); - // read schema and validate data - data = data[0]; - const schema = JSON.parse(fs.readFileSync(constants.DECL.SYSTEM_POLLER_SCHEMA)); - const valid = util.validateAgainstSchema(data, schema); - if (valid !== true) { - assert.fail(`output is not valid: ${JSON.stringify(valid.errors)}`); - } - }); - }); - - it('should ensure event listener is up', () => { - const connectToEventListener = (port) => new Promise((resolve, reject) => { - const client = net.createConnection({ host, port }, () => { - client.end(); - }); - client.on('end', () => { - resolve(); - }); - client.on('error', (err) => { - reject(new Error(`Can not connect to TCP port ${port}: ${err}`)); - }); - }); - - // ports = { opened: [], closed: [] } - const checkPorts = (ports) => Promise.all( - (ports.opened || []).map( - (openedPort) => assert.isFulfilled(connectToEventListener(openedPort)) - ).concat( - (ports.closed || []).map( - (closedPort) => connectToEventListener(closedPort) - .then( - () => Promise.reject(new Error(`Port ${closedPort} expected to be closed`)), - () => {} // do nothing on catch - ) - ) - ) - ); - - const findListeners = (obj, cb) => { - if (typeof obj === 'object') { - if (obj.class === 'Telemetry_Listener') { - cb(obj); - } else { - Object.keys(obj).forEach((key) => findListeners(obj[key], cb)); - } + // wait 500ms in case if config was not applied yet + it('should get response from systempoller endpoint', () => promiseUtils.sleep(500) + .then(() => namespaceTelemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be array'); + assert.isNotEmpty(data, 'should have at least one element'); + // read schema and validate data + data = data[0]; + const schema = miscUtils.readJsonFile(constants.DECL.SYSTEM_POLLER_SCHEMA); + const valid = miscUtils.validateAgainstSchema(data, 
schema); + if (valid !== true) { + assert.fail(`output is not valid: ${JSON.stringify(valid.errors)}`); } - }; - - const fetchListenerPorts = (decl) => { - const ports = []; - findListeners(decl, (listener) => ports.push(listener.port || 6514)); - return ports; - }; - - const allListenerPorts = [ - constants.EVENT_LISTENER_DEFAULT_PORT, - constants.EVENT_LISTENER_SECONDARY_PORT, - constants.EVENT_LISTENER_NAMESPACE_PORT, - constants.EVENT_LISTENER_NAMESPACE_SECONDARY_PORT - ]; - const newPorts = [ - constants.EVENT_LISTENER_SECONDARY_PORT, - constants.EVENT_LISTENER_NAMESPACE_SECONDARY_PORT - ]; - - const uri = testSetup.useNamespaceDeclare ? `${constants.BASE_ILX_URI}${namespacePath}/declare` : `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: testSetup.useNamespaceDeclare ? { class: 'Telemetry_Namespace' } : { class: 'Telemetry' } - }); - return util.makeRequest(host, uri, util.deepCopy(postOptions)) - .then(() => checkPorts({ - closed: util.deepCopy(allListenerPorts) - })) - .then(() => { - postOptions.body = getDeclToUse(testSetup); - return util.makeRequest(host, uri, util.deepCopy(postOptions)); - }) - .then(() => { - const ports = { opened: fetchListenerPorts(postOptions.body) }; - ports.closed = allListenerPorts.filter((port) => ports.opened.indexOf(port) === -1); - return checkPorts(ports); - }) - // post declaration again and check that listeners are still available - .then(() => util.makeRequest(host, uri, util.deepCopy(postOptions))) - .then(() => { - const ports = { opened: fetchListenerPorts(postOptions.body) }; - ports.closed = allListenerPorts.filter((port) => ports.opened.indexOf(port) === -1); - return checkPorts(ports); - }) - .then(() => { - let idx = 0; - // already a copy - postOptions.body = getDeclToUse(testSetup); - // disable all listeners - findListeners(postOptions.body, (listener) => { - if (idx >= newPorts.length) { - throw new Error(`Expected ${newPorts.length} 
listeners only`); - } - listener.port = newPorts[idx]; - idx += 1; - }); - return util.makeRequest(host, uri, util.deepCopy(postOptions)); - }) - .then(() => { - const ports = { opened: fetchListenerPorts(postOptions.body) }; - ports.closed = allListenerPorts.filter((port) => ports.opened.indexOf(port) === -1); - return checkPorts(ports); - }) - // post declaration again and check that listeners are still available - .then(() => util.makeRequest(host, uri, util.deepCopy(postOptions))) - .then(() => { - const ports = { opened: fetchListenerPorts(postOptions.body) }; - ports.closed = allListenerPorts.filter((port) => ports.opened.indexOf(port) === -1); - return checkPorts(ports); - }) - .then(() => { - // already a copy - postOptions.body = getDeclToUse(testSetup); - // disable all listeners - findListeners(postOptions.body, (listener) => { - listener.enable = false; - }); - return util.makeRequest(host, uri, util.deepCopy(postOptions)); - }) - .then(() => checkPorts({ - closed: util.deepCopy(allListenerPorts) - })); - }); + })); }); describe('advanced options', () => { - ifNoNamespaceIt('should apply configuration containing system poller filtering', testSetup, () => { - let uri = `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: fs.readFileSync(constants.DECL.FILTER).toString() - }); - - // wait 2s to buffer consecutive POSTs - return util.sleep(2000) - .then(() => util.makeRequest(host, uri, postOptions)) - .then((data) => { - util.logger.info('Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); - // wait 5s in case if config was not applied yet - return util.sleep(5000); - }) - .then(() => { - uri = `${constants.BASE_ILX_URI}${namespacePath}/systempoller/${constants.DECL.SYSTEM_NAME}`; - return util.makeRequest(host, uri, util.deepCopy(options)); - }) - .then((data) => { - data = data || []; - util.logger.info(`Filtered SystemPoller response (${uri}):`, 
{ host, data }); - - assert.strictEqual(data.length, 1); - // verify that certain data was filtered out, while other data was preserved - data = data[0]; - assert.strictEqual(Object.keys(data.system).indexOf('provisioning'), -1); - assert.strictEqual(Object.keys(data.system.diskStorage).indexOf('/usr'), -1); - assert.notStrictEqual(Object.keys(data.system.diskStorage).indexOf('/'), -1); - assert.notStrictEqual(Object.keys(data.system).indexOf('version'), -1); - assert.notStrictEqual(Object.keys(data.system).indexOf('hostname'), -1); - }); - }); - - ifNoNamespaceIt('should apply configuration containing chained system poller actions', testSetup, () => { - let uri = `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: fs.readFileSync(constants.DECL.ACTION_CHAINING).toString() - }); - - // wait 2s to buffer consecutive POSTs - return util.sleep(2000) - .then(() => util.makeRequest(host, uri, postOptions)) - .then((data) => { - util.logger.info('Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); - // wait 5s in case if config was not applied yet - return util.sleep(5000); - }) - .then(() => { - uri = `${constants.BASE_ILX_URI}${namespacePath}/systempoller/${constants.DECL.SYSTEM_NAME}`; - return util.makeRequest(host, uri, util.deepCopy(options)); - }) - .then((data) => { - data = data || {}; - util.logger.info(`Filtered SystemPoller response (${uri}):`, { host, data }); - - assert.strictEqual(data.length, 1); - data = data[0]; - // verify /var is included with, with 1_tagB removed - assert.notStrictEqual(Object.keys(data.system.diskStorage).indexOf('/var'), -1); - assert.deepEqual(data.system.diskStorage['/var']['1_tagB'], { '1_valueB_1': 'value1' }); - // verify /var/log is included with, with 1_tagB included - assert.strictEqual(Object.keys(data.system.diskStorage['/var/log']).indexOf('1_tagB'), -1); - 
assert.deepEqual(data.system.diskStorage['/var/log']['1_tagA'], 'myTag'); - }); - }); - - ifNoNamespaceIt('should apply configuration containing filters with ifAnyMatch', testSetup, () => { - let uri = `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: fs.readFileSync(constants.DECL.FILTERING_WITH_MATCHING).toString() - }); - - // wait 2s to buffer consecutive POSTs - return util.sleep(2000) - .then(() => util.makeRequest(host, uri, postOptions)) - .then((data) => { - util.logger.info('Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); - // wait 5s in case if config was not applied yet - return util.sleep(5000); - }) - .then(() => { - uri = `${constants.BASE_ILX_URI}${namespacePath}/systempoller/${constants.DECL.SYSTEM_NAME}`; - return util.makeRequest(host, uri, util.deepCopy(options)); - }) - .then((data) => { - data = data || {}; - util.logger.info(`Filtered and Matched SystemPoller response (${uri}):`, { host, data }); - - assert.strictEqual(data.length, 1); - data = data[0]; - // verify that 'system' key and child objects are included - assert.deepEqual(Object.keys(data), ['system']); - assert.ok(Object.keys(data.system).length > 1); - // verify that 'system.diskStorage' is NOT excluded - assert.notStrictEqual(Object.keys(data.system).indexOf('diskStorage'), -1); - }); - }); - - ifNoNamespaceIt('should apply configuration containing multiple system pollers and endpointList', testSetup, () => { - let uri = `${constants.BASE_ILX_URI}/declare`; - const postOptions = Object.assign(util.deepCopy(options), { - method: 'POST', - body: fs.readFileSync(constants.DECL.ENDPOINTLIST).toString() - }); - - // wait 2s to buffer consecutive POSTs - return util.sleep(2000) - .then(() => util.makeRequest(host, uri, postOptions)) - .then((data) => { - util.logger.info('Declaration response:', { host, data }); - assert.strictEqual(data.message, 'success'); - // wait 2s 
in case if config was not applied yet - return util.sleep(2000); - }) - .then(() => { - uri = `${constants.BASE_ILX_URI}${namespacePath}/systempoller/${constants.DECL.SYSTEM_NAME}`; - return util.makeRequest(host, uri, util.deepCopy(options)); - }) - .then((data) => { - util.logger.info(`System Poller with endpointList response (${uri}):`, { host, data }); - assert.ok(Array.isArray(data)); - - const pollerOneData = data[0]; - const pollerTwoData = data[1]; - assert.notStrictEqual(pollerOneData.custom_ipOther, undefined); - assert.notStrictEqual(pollerOneData.custom_dns, undefined); - assert.ok(pollerTwoData.custom_provisioning.items.length > 0); - }); - }); + ifNoNamespaceIt('should apply configuration containing system poller filtering', testSetup, () => promiseUtils.sleep(500) + .then(() => miscUtils.readJsonFile(constants.DECL.FILTER, true)) + .then((decl) => defaultTelemetry.declare(decl)) + .then((data) => { + bigip.logger.info('POST declaration:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + // wait 5s in case if config was not applied yet + return promiseUtils.sleep(500); + }) + .then(() => namespaceTelemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be array'); + assert.isNotEmpty(data, 'should have at least one element'); + // verify that certain data was filtered out, while other data was preserved + data = data[0]; + assert.deepStrictEqual(Object.keys(data.system).indexOf('provisioning'), -1); + assert.deepStrictEqual(Object.keys(data.system.diskStorage).indexOf('/usr'), -1); + assert.notStrictEqual(Object.keys(data.system.diskStorage).indexOf('/'), -1); + assert.notStrictEqual(Object.keys(data.system).indexOf('version'), -1); + assert.notStrictEqual(Object.keys(data.system).indexOf('hostname'), -1); + })); + + ifNoNamespaceIt('should apply configuration 
containing chained system poller actions', testSetup, () => promiseUtils.sleep(500) + .then(() => miscUtils.readJsonFile(constants.DECL.ACTION_CHAINING, true)) + .then((decl) => defaultTelemetry.declare(decl)) + .then((data) => { + bigip.logger.info('POST declaration:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + // wait 5s in case if config was not applied yet + return promiseUtils.sleep(500); + }) + .then(() => namespaceTelemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be array'); + assert.isNotEmpty(data, 'should have at least one element'); + // verify /var is included with, with 1_tagB removed + data = data[0]; + assert.notStrictEqual(Object.keys(data.system.diskStorage).indexOf('/var'), -1); + assert.deepStrictEqual(data.system.diskStorage['/var']['1_tagB'], { '1_valueB_1': 'value1' }); + // verify /var/log is included with, with 1_tagB included + assert.deepStrictEqual(Object.keys(data.system.diskStorage['/var/log']).indexOf('1_tagB'), -1); + assert.deepStrictEqual(data.system.diskStorage['/var/log']['1_tagA'], 'myTag'); + })); + + ifNoNamespaceIt('should apply configuration containing filters with ifAnyMatch', testSetup, () => promiseUtils.sleep(500) + .then(() => miscUtils.readJsonFile(constants.DECL.FILTERING_WITH_MATCHING, true)) + .then((decl) => defaultTelemetry.declare(decl)) + .then((data) => { + bigip.logger.info('POST declaration:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + // wait 5s in case if config was not applied yet + return promiseUtils.sleep(500); + }) + .then(() => namespaceTelemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be 
array'); + assert.isNotEmpty(data, 'should have at least one element'); + // verify that 'system' key and child objects are included + data = data[0]; + assert.deepStrictEqual(Object.keys(data), ['system']); + assert.ok(Object.keys(data.system).length > 1); + // verify that 'system.diskStorage' is NOT excluded + assert.notStrictEqual(Object.keys(data.system).indexOf('diskStorage'), -1); + })); + + ifNoNamespaceIt('should apply configuration containing multiple system pollers and endpointList', testSetup, () => promiseUtils.sleep(500) + .then(() => miscUtils.readJsonFile(constants.DECL.ENDPOINTLIST, true)) + .then((decl) => defaultTelemetry.declare(decl)) + .then((data) => { + bigip.logger.info('POST declaration:', { data }); + assert.deepStrictEqual(data.message, 'success', 'should return successful response'); + // wait 2s in case if config was not applied yet + return promiseUtils.sleep(500); + }) + .then(() => namespaceTelemetry.getSystemPollerData(constants.DECL.SYSTEM_NAME)) + .then((data) => { + bigip.logger.info(`SystemPoller "${constants.DECL.SYSTEM_NAME}" response:`, { data }); + assert.isArray(data, 'should be array'); + assert.deepStrictEqual(data.length, 2, 'should have two elements'); + + const pollerOneData = data[0]; + const pollerTwoData = data[1]; + assert.notStrictEqual(pollerOneData.custom_ipOther, undefined); + assert.notStrictEqual(pollerOneData.custom_dns, undefined); + assert.ok(pollerTwoData.custom_provisioning.items.length > 0); + })); }); }); }); @@ -841,83 +693,54 @@ function test() { }); } +/** + * Teardown DUTs + */ function teardown() { - // purpose: cleanup tests - // account for 1+ DUTs - duts.forEach((item) => { - describe(`Cleanup DUT - ${item.hostname}`, () => { - const host = item.ip; - const user = item.username; - const password = item.password; - - let authToken = null; - let logFile = null; - let options = {}; - - before(() => util.getAuthToken(host, user, password) - .then((data) => { - authToken = data.token; - })); - - 
beforeEach(() => { - options = { - headers: { - 'x-f5-auth-token': authToken - } - }; + const cmd = `cat ${pathUtil.join(constants.BIGIP.RESTNODED.LOGS_DIR, '*.log')} | grep telemetry`; + const harness = harnessUtils.getDefaultHarness(); + const getLogFilePath = (bigip) => `${constants.ARTIFACTS_DIR}/restnoded_${bigip.name}__${bigip.host.host}.log`; + const regexp = /\[telemetry][\S\s]*error/i; + + harness.bigip.forEach((bigip) => { + describe(`Cleanup DUT - ${bigip.name}`, () => { + let logFilePath; + + before(() => { + logFilePath = getLogFilePath(bigip); + bigip.logger.info('Path to save restnoded logs:', { logFilePath }); }); - it('should get restnoded log', () => { - // grab restnoded log - useful during test failures - // interested only in lines with 'telemetry' - const uri = '/mgmt/tm/util/bash'; - const postOptions = { - method: 'POST', - headers: options.headers, - body: JSON.stringify({ - command: 'run', - utilCmdArgs: '-c "cat /var/log/restnoded/restnoded.log | grep telemetry"' - }) - }; - return util.makeRequest(host, uri, postOptions) - .then((data) => { - logFile = `${constants.ARTIFACTS_DIR}/restnoded_${host}.log`; - util.logger.info(`Saving restnoded log to ${logFile}`); - fs.writeFileSync(logFile, data.commandResult); - }); - }); + // grab restnoded log - useful during test failures + // interested only in lines with 'telemetry' + it('should get restnoded log', () => bigip.icAPI.default.runBashCmd(cmd) + .then((response) => { + bigip.logger.info('Saving restnoded logs to:', { logFilePath }); + fs.writeFileSync(logFilePath, response.commandResult); + })); - it('should check restnoded log for errors in [telemetry] messages', () => { + it('should check restnoded log for errors in [telemetry] messages', () => new Promise((resolve, reject) => { let errCounter = 0; - const regexp = /\[telemetry][\S\s]*error/i; - const rl = readline.createInterface({ - input: fs.createReadStream(logFile) + input: fs.createReadStream(logFilePath) }); rl.on('line', (line) 
=> { if (regexp.test(line)) { errCounter += 1; } }); - return new Promise((resolve) => { - rl.on('close', resolve); - }) - .then(() => { - if (errCounter) { - return Promise.reject(new Error(`${errCounter} error messages were found in ${logFile}`)); - } - return Promise.resolve(); - }); - }); - - it('should remove existing TS packages', () => new Promise((resolve, reject) => { - uninstallAllTSpackages(host, authToken, options) - .then(resolve) - .catch((err) => { - util.logger.info(`Unable to verify package uninstall due following error: ${err}`); - setTimeout(() => reject(err), 5000); - }); + rl.on('close', () => { + if (errCounter) { + reject(new Error(`${errCounter} error messages were found in ${logFilePath}`)); + } else { + resolve(); + } + }); })); + + testUtils.shouldRemovePreExistingTSPackage(bigip); + + it('teardown all connections', () => bigip.teardown()); }); }); } @@ -925,14 +748,5 @@ function teardown() { module.exports = { setup, test, - teardown, - utils: { - getPullConsumerData, - getSystemPollerData, - getSystemPollersData, - postDeclarationToDUT, - postDeclarationToDUTs, - sendDataToEventListener, - sendDataToEventListeners - } + teardown }; diff --git a/test/functional/pullConsumerSystemTests.js b/test/functional/pullConsumerSystemTests.js deleted file mode 100644 index 33f91266..00000000 --- a/test/functional/pullConsumerSystemTests.js +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for - * license terms. Notwithstanding anything to the contrary in the EULA, Licensee - * may copy and modify this software product for its internal business purposes. - * Further, Licensee may upload, publish and distribute the modified version of - * the software product on devcentral.f5.com. 
- */ - -// this object not passed with lambdas, which mocha uses - -'use strict'; - -const fs = require('fs'); - -const constants = require('./shared/constants'); -const util = require('./shared/util'); - -// string -> object (consumer module) -let pullConsumersMap = {}; - -function loadPullConsumers() { - // env var to run only specific pull consumer type(s) (e.g. 'default') - const pullConsumerFilter = process.env[constants.ENV_VARS.PULL_CONSUMER_HARNESS.TYPE_REGEX]; - const pullConsumerDir = constants.PULL_CONSUMERS_DIR; - let pullConsumers = fs.readdirSync(pullConsumerDir); - - // filter consumers by module name if needed - if (pullConsumerFilter) { - util.logger.info(`Using filter '${pullConsumerFilter}' to filter modules from '${pullConsumerDir}'`); - pullConsumers = pullConsumers.filter((fName) => fName.match(new RegExp(pullConsumerFilter, 'i')) !== null); - } - - const mapping = {}; - pullConsumers.forEach((pullConsumer) => { - const cpath = `${pullConsumerDir}/${pullConsumer}`; - mapping[pullConsumer] = require(cpath); //eslint-disable-line - util.logger.info(`Pull Consumer Tests from '${cpath}' loaded`); - }); - return mapping; -} - -function setup() { - describe('Load modules with tests for consumers', () => { - // should be loaded at the beginning of process - pullConsumersMap = loadPullConsumers(); - }); -} - -function test() { - const methodsToCall = ['test']; - - describe('Pull Consumer Tests', () => { - // consumers tests should be loaded already - Object.keys(pullConsumersMap).forEach((consumer) => { - describe(consumer, () => { - const consumerModule = pullConsumersMap[consumer]; - - methodsToCall.forEach((method) => { - if (consumerModule[method]) { - consumerModule[method].apply(consumerModule); - } else { - util.logger.console.warn(`WARN: Pull Consumer Test "${consumer}" has no '${method}' method to call`); - } - }); - }); - }); - }); -} - -module.exports = { - setup, - test -}; diff --git a/test/functional/pullConsumersTests/defaultTests.js 
b/test/functional/pullConsumersTests/defaultTests.js deleted file mode 100644 index 58f587db..00000000 --- a/test/functional/pullConsumersTests/defaultTests.js +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for - * license terms. Notwithstanding anything to the contrary in the EULA, Licensee - * may copy and modify this software product for its internal business purposes. - * Further, Licensee may upload, publish and distribute the modified version of - * the software product on devcentral.f5.com. - */ - -// this object not passed with lambdas, which mocha uses - -'use strict'; - -const fs = require('fs'); -const assert = require('assert'); -const constants = require('../shared/constants'); -const DEFAULT_UNNAMED_NAMESPACE = require('../../../src/lib/constants').DEFAULT_UNNAMED_NAMESPACE; -const util = require('../shared/util'); -const dutUtils = require('../dutTests').utils; - -const DUTS = util.getHosts('BIGIP'); - -// read in example configs -const BASIC_DECL = JSON.parse(fs.readFileSync(constants.DECL.PULL_CONSUMER_BASIC)); -const NAMESPACE_DECL = JSON.parse(fs.readFileSync(constants.DECL.PULL_CONSUMER_WITH_NAMESPACE)); - -function test() { - const verifyResponseData = (response) => { - const body = JSON.parse(response.body); - const headers = response.headers; - - assert.strictEqual(body.length, 1); - assert.notStrictEqual( - Object.keys(body[0].system).indexOf('hostname'), - -1, - 'should have \'hostname\' in expected data position' - ); - assert.ok(headers['content-type'].includes('application/json'), 'content-type should include application/json type'); - }; - - describe('Pull Consumer Test: default consumer type - no namespace', () => { - describe('default - Configure TS', () => { - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(BASIC_DECL)) - )); - }); - - describe('default - Tests', () => { - const 
pullConsumerName = 'My_Pull_Consumer'; - DUTS.forEach((dut) => { - it( - `should get the Pull Consumer's formatted data from: ${dut.hostalias}`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { rawResponse: true }) - .then((response) => { - verifyResponseData(response); - }) - ); - - it( - `should get the Pull Consumer's formatted data from: ${dut.hostalias} (using namespace endpoint)`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { - namespace: DEFAULT_UNNAMED_NAMESPACE, - rawResponse: true - }) - .then((response) => { - verifyResponseData(response); - }) - ); - }); - }); - }); - - describe('Pull Consumer Test: default consumer type - with namespace', () => { - describe('default with namespace - Configure TS', () => { - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(NAMESPACE_DECL)) - )); - }); - - describe('default with namespace - Tests', () => { - const pullConsumerName = 'Pull_Consumer'; - const namespace = 'Second_Namespace'; - DUTS.forEach((dut) => { - it( - `should get the Pull Consumer's formatted data from: ${dut.hostalias}`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { namespace, rawResponse: true }) - .then((response) => { - verifyResponseData(response); - }) - ); - }); - }); - }); -} - -module.exports = { - test -}; diff --git a/test/functional/pullConsumersTests/prometheusTests.js b/test/functional/pullConsumersTests/prometheusTests.js deleted file mode 100644 index d6e21462..00000000 --- a/test/functional/pullConsumersTests/prometheusTests.js +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for - * license terms. Notwithstanding anything to the contrary in the EULA, Licensee - * may copy and modify this software product for its internal business purposes. 
- * Further, Licensee may upload, publish and distribute the modified version of - * the software product on devcentral.f5.com. - */ - -// this object not passed with lambdas, which mocha uses - -'use strict'; - -const fs = require('fs'); -const assert = require('assert'); -const constants = require('../shared/constants'); -const DEFAULT_UNNAMED_NAMESPACE = require('../../../src/lib/constants').DEFAULT_UNNAMED_NAMESPACE; -const util = require('../shared/util'); -const dutUtils = require('../dutTests').utils; - -const DUTS = util.getHosts('BIGIP'); - -// read in example config -const BASIC_DECL = JSON.parse(fs.readFileSync(constants.DECL.PULL_CONSUMER_BASIC)); -const NAMESPACE_DECL = JSON.parse(fs.readFileSync(constants.DECL.PULL_CONSUMER_WITH_NAMESPACE)); -const PROMETHEUS_PULL_CONSUMER_TYPE = 'Prometheus'; -const PROMETHEUS_CONTENT_TYPE = 'text/plain; version=0.0.4; charset=utf-8'; - -function test() { - const verifyResponseData = (response) => { - const body = response.body; - const headers = response.headers; - - assert.notStrictEqual( - body.indexOf('# HELP f5_counters_bitsIn counters.bitsIn'), - -1, - 'help text should exist, and contain original metric name' - ); - assert.notStrictEqual( - body.indexOf('f5_counters_bitsIn{networkInterfaces="mgmt"}'), - -1, - 'metric should include label with label value' - ); - assert.notStrictEqual( - body.indexOf('f5_system_tmmTraffic_serverSideTraffic_bitsIn'), - -1, - 'metrics without labels should store path in metric name' - ); - assert.notStrictEqual( - body.match(/(f5_system_memory )[0-9]{1,2}\n/), - null, - 'metric\'s value should only be a numeric, followed by a newline' - ); - assert.strictEqual(headers['content-type'], PROMETHEUS_CONTENT_TYPE, 'content-type should be of type text/plain'); - }; - - describe('Pull Consumer Test: Prometheus - no namespace', () => { - const pullConsumerName = 'My_Pull_Consumer'; - before(() => { - // Update Declaration to reference Prometheus - BASIC_DECL[pullConsumerName].type = 
PROMETHEUS_PULL_CONSUMER_TYPE; - }); - describe('Prometheus - Configure TS', () => { - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(BASIC_DECL)) - )); - }); - - describe('Prometheus - Tests', () => { - DUTS.forEach((dut) => { - it( - `should the Pull Consumer's formatted data from: ${dut.hostalias}`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { rawResponse: true }) - .then((response) => { - verifyResponseData(response); - }) - ); - - it( - `should the Pull Consumer's formatted data from: ${dut.hostalias} (using namespace endpoint)`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { - namespace: DEFAULT_UNNAMED_NAMESPACE, - rawResponse: true - }) - .then((response) => { - verifyResponseData(response); - }) - ); - }); - }); - }); - - describe('Pull Consumer Test: Prometheus - with namespace', () => { - const pullConsumerName = 'Pull_Consumer'; - const namespace = 'Second_Namespace'; - - before(() => { - // Update Declaration to reference Prometheus - NAMESPACE_DECL[namespace][pullConsumerName].type = PROMETHEUS_PULL_CONSUMER_TYPE; - }); - describe('Prometheus with namespace - Configure TS', () => { - DUTS.forEach((dut) => it( - `should configure TS - ${dut.hostalias}`, - () => dutUtils.postDeclarationToDUT(dut, util.deepCopy(NAMESPACE_DECL)) - )); - }); - - describe('Prometheus with namespace - Tests', () => { - DUTS.forEach((dut) => { - it( - `should the Pull Consumer's formatted data from: ${dut.hostalias}`, - () => dutUtils.getPullConsumerData(dut, pullConsumerName, { namespace, rawResponse: true }) - .then((response) => { - verifyResponseData(response); - }) - ); - }); - }); - }); -} - -module.exports = { - test -}; diff --git a/test/functional/shared/azureUtil.js b/test/functional/shared/azureUtil.js deleted file mode 100644 index 05728935..00000000 --- a/test/functional/shared/azureUtil.js +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2022. 
F5 Networks, Inc. See End User License Agreement ("EULA") for - * license terms. Notwithstanding anything to the contrary in the EULA, Licensee - * may copy and modify this software product for its internal business purposes. - * Further, Licensee may upload, publish and distribute the modified version of - * the software product on devcentral.f5.com. - */ - -'use strict'; - -const util = require('./util'); - -function getOAuthToken(clientId, clientSecret, tenantId, cloudType) { - const loginDomain = cloudType === 'gov' ? 'login.microsoftonline.us' : 'login.microsoftonline.com'; - const resource = cloudType === 'gov' ? 'https://api.loganalytics.us/' : 'https://api.loganalytics.io/'; - const options = { - method: 'POST', - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - body: [ - 'grant_type=client_credentials', - `client_id=${clientId}`, - `redirect_uri=https://${loginDomain}/common/oauth2/nativeclient`, - `client_secret=${encodeURIComponent(clientSecret)}`, - `resource=${resource}` - ].join('&') - }; - return util.makeRequest( - loginDomain, - `/${tenantId}/oauth2/token`, - options - ); -} - -function queryLogs(oauthToken, workspaceId, queryString, cloudType) { - const apiDomain = cloudType === 'gov' ? 'api.loganalytics.us' : 'api.loganalytics.io'; - const options = { - method: 'POST', - headers: { - Authorization: `Bearer ${oauthToken}`, - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ query: queryString }) - - }; - return util.makeRequest( - apiDomain, - `/v1/workspaces/${workspaceId}/query`, - options - ); -} - -function queryAppInsights(appId, apiKey, cloudType) { - const apiDomain = cloudType === 'gov' ? 
'api.applicationinsights.us' : 'api.applicationinsights.io'; - const options = { - headers: { - 'x-api-key': apiKey - } - }; - return util.makeRequest( - apiDomain, - `/v1/apps/${appId}/metrics/customMetrics/F5_system_tmmMemory?timespan=PT3M`, - options - ); -} - -module.exports = { - getOAuthToken, - queryLogs, - queryAppInsights -}; diff --git a/test/functional/shared/cloudUtils/aws.js b/test/functional/shared/cloudUtils/aws.js new file mode 100644 index 00000000..c788dfe9 --- /dev/null +++ b/test/functional/shared/cloudUtils/aws.js @@ -0,0 +1,146 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const AWS = require('aws-sdk'); + +const constants = require('../constants'); +const miscUtils = require('../utils/misc'); + +/** + * @module test/functional/shared/cloudUtils/aws + */ + +/** + * Configure AWS global config + * + * @param {AWSCloudMetadata} metadata + */ +function configureAWSGlobal(metadata) { + AWS.config.update({ + accessKeyId: metadata.accessKey.id, + region: metadata.region, + secretAccessKey: metadata.accessKey.secret + }); +} + +/** + * Gather info about harness from process.env + * + * @public + * + * @returns {Array} + */ +function getCloudHarnessJSON() { + const instance = getEnvFile().instances[0]; + return [{ + admin_ip: instance.mgmt_address, + f5_hostname: `bigip_${instance.instanceId}.hostname`, + f5_rest_api_port: instance.mgmt_port, + f5_rest_user: { + username: instance.admin_username, + password: instance.admin_password + }, + f5_validate_certs: false, + is_f5_device: true, + ssh_port: constants.CLOUD.AWS.BIGIP.SSH.DEFAULT_PORT, + ssh_user: { + username: instance.admin_username, + 
password: instance.admin_password + } + }]; +} + +/** + * Get Azure API Cloud metadata from process env + * + * @public + * + * @returns {Promise} + */ +function getCloudMetadataFromProcessEnv() { + return new Promise((resolve) => { + const envData = getEnvFile(); + resolve({ + accessKey: { + id: miscUtils.getEnvArg(constants.ENV_VARS.AWS.ACCESS_KEY_ID), + secret: miscUtils.getEnvArg(constants.ENV_VARS.AWS.ACCESS_KEY_SECRET) + }, + bucket: envData.bucket, + metricNamespace: miscUtils.getEnvArg(constants.ENV_VARS.AWS.METRIC_NAMESPACE), + region: envData.region + }); + }); +} + +/** + * @public + * + * @returns {AWS.CloudWatch} instance + */ +function getCloudWatchClient() { + return new AWS.CloudWatch({ apiVersion: '2010-08-01' }); +} + +/** + * Read and parse AWS Cloud Env file + * + * @returns {AWSCloudEnvMetadata} + */ +function getEnvFile() { + const filePath = miscUtils.getEnvArg(constants.ENV_VARS.AWS.HARNESS_FILE); + return miscUtils.readJsonFile(filePath, false); +} + +/** + * @public + * + * @returns {AWS.S3} instance + */ +function getS3Client() { + return new AWS.S3({ apiVersion: '2006-03-01' }); +} + +module.exports = { + configureAWSGlobal, + getCloudHarnessJSON, + getCloudMetadataFromProcessEnv, + getCloudWatchClient, + getS3Client +}; + +/** + * @typedef AWSCloudInstance + * @type {Object} + * @property {string} admin_password + * @property {string} admin_username + * @property {string} instanceId + * @property {string} mgmt_address + * @property {string} mgmt_port + * @property {integer} mgmt_port + */ +/** + * @typedef AWSCloudEnvMetadata + * @type {Object} + * @property {string} bucket + * @property {string} deploymentId + * @property {string} environment + * @property {Array} instances + * @property {string} region + */ +/** + * @typedef AWSCloudMetadata + * @type {Object} + * @property {Object} accessKey + * @property {string} accessKey.id + * @property {string} accessKey.secret + * @property {string} bucket + * @property {string} metricNamespace 
+ * @property {string} region + */ diff --git a/test/functional/shared/cloudUtils/azure.js b/test/functional/shared/cloudUtils/azure.js new file mode 100644 index 00000000..295216ea --- /dev/null +++ b/test/functional/shared/cloudUtils/azure.js @@ -0,0 +1,276 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const constants = require('../constants'); +const logger = require('../utils/logger').getChild('azureUtils'); +const miscUtils = require('../utils/misc'); +const request = require('../utils/request'); + +/** + * @module test/functional/shared/cloudUtils/azure + */ + +const AZURE = { + GOV: { + appInsights: { + api: 'api.applicationinsights.us' + }, + logAnalytics: { + api: 'api.loganalytics.us' + }, + login: 'login.microsoftonline.us', + resource: 'https://api.loganalytics.us/' + }, + PUBLIC: { + appInsights: { + api: 'api.applicationinsights.io' + }, + logAnalytics: { + api: 'api.loganalytics.io' + }, + login: 'login.microsoftonline.com', + resource: 'https://api.loganalytics.io/' + } +}; + +const SERVICE_TYPE = Object.freeze({ + AI: 'AppInsights', + LA: 'LogAnalytics' +}); + +/** + * Get Azure constants by cloud type + * + * @private + * + * @param {string} [cloudType = 'PUBLIC'] + * + * @returns {Object} + */ +function getAzure(cloudType) { + return AZURE[typeof cloudType === 'string' && cloudType.toLowerCase() === 'gov' ? 
'GOV' : 'PUBLIC']; +} + +/** + * Gather info about harness from process.env + * + * @public + * + * @returns {Array} + */ +function getCloudHarnessJSON() { + return [{ + admin_ip: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_IP), + f5_hostname: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_HOSTNAME), + f5_rest_api_port: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_PORT, { + defaultValue: constants.CLOUD.AZURE.BIGIP.REST_API.DEFAULT_PORT + }), + f5_rest_user: { + username: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_USER, { + defaultValue: constants.CLOUD.AZURE.BIGIP.REST_API.DEFAULT_USER + }), + password: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_PWD) + }, + f5_validate_certs: false, + is_f5_device: true, + ssh_port: constants.CLOUD.AZURE.BIGIP.SSH.DEFAULT_PORT, + ssh_user: { + username: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_USER, { + defaultValue: constants.CLOUD.AZURE.BIGIP.REST_API.DEFAULT_USER + }), + password: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.VM_PWD) + } + }]; +} + +/** + * Get Azure API Cloud metadata from process env + * + * @public + * + * @param {'LogAnalytics' | 'AppInsights'} serviceType - service type + * + * @returns {Promise} + */ +function getCloudMetadataFromProcessEnv(serviceType) { + return new Promise((resolve) => { + if (serviceType === SERVICE_TYPE.LA) { + resolve({ + clientID: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.CLIENT_ID), + cloudType: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.CLOUD_TYPE, { + defaultValue: 'PUBLIC' + }), + logKey: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.LOG_KEY), + tenant: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.TENANT), + workspace: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.WORKSPACE_MI) + }); + } else { + resolve({ + apiKey: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.APPINS_API_KEY), + appID: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.APPINS_APP_ID), + cloudType: miscUtils.getEnvArg(constants.ENV_VARS.AZURE.CLOUD_TYPE, { + defaultValue: 'PUBLIC' + }) + }); 
+ } + }); +} + +/** + * Get Azure API metadata from process env + * + * @public + * + * @param {'LogAnalytics' | 'AppInsights'} serviceType - service type + * + * @returns {Promise>} + */ +function getMetadataFromProcessEnv(serviceType) { + return new Promise((resolve, reject) => { + const envVar = serviceType === SERVICE_TYPE.LA + ? constants.ENV_VARS.AZURE.LA_API_DATA + : constants.ENV_VARS.AZURE.APPINS_API_DATA; + + logger.info('Reading Azure API data env variable', { envVar, serviceType }); + + const apiDataFilePath = miscUtils.getEnvArg(envVar); + logger.info('Reading Azure API data from file', { apiDataFilePath, serviceType }); + + miscUtils.readJsonFile(apiDataFilePath, true) + .then(resolve, reject); + }); +} + +/** + * Get auth token + * + * @public + * + * @param {string} clientId + * @param {string} clientSecret + * @param {string} tenantId + * @param {'GOV' | 'PUBLIC'} [cloudType = 'PUBLIC'] + * + * @returns {Promise} resolved with access token + */ +function getOAuthToken(clientId, clientSecret, tenantId, cloudType) { + const azure = getAzure(cloudType); + return request({ + body: [ + 'grant_type=client_credentials', + `client_id=${clientId}`, + `redirect_uri=https://${azure.login}/common/oauth2/nativeclient`, + `client_secret=${encodeURIComponent(clientSecret)}`, + `resource=${azure.resource}` + ].join('&'), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + host: azure.login, + json: false, + method: 'POST', + port: 443, + protocol: 'https', + uri: `/${tenantId}/oauth2/token` + }) + .then((data) => data.access_token); +} + +/** + * Query Application Insights service + * + * @public + * + * @param {string} appId + * @param {string} apiKey + * @param {'GOV' | 'PUBLIC'} [cloudType = 'PUBLIC'] + * + * @returns {Promise} + */ +function queryAppInsights(appId, apiKey, cloudType) { + const azure = getAzure(cloudType); + return request({ + headers: { + 'x-api-key': apiKey + }, + host: azure.appInsights.api, + port: 443, + protocol: 
'https', + uri: `/v1/apps/${appId}/metrics/customMetrics/F5_system_tmmMemory?timespan=PT5M` + }); +} + +/** + * Query Azure Log Analytics service + * + * @public + * + * @param {string} oauthToken + * @param {string} workspaceId + * @param {string} queryString + * @param {'GOV' | 'PUBLIC'} [cloudType = 'PUBLIC'] + * + * @returns {Promise} + */ +function queryLogs(oauthToken, workspaceId, queryString, cloudType) { + const azure = getAzure(cloudType); + return request({ + body: { query: queryString }, + headers: { + Authorization: `Bearer ${oauthToken}`, + 'Content-Type': 'application/json' + }, + host: azure.logAnalytics.api, + json: true, + method: 'POST', + port: 443, + protocol: 'https', + uri: `/v1/workspaces/${workspaceId}/query` + }); +} + +module.exports = { + getOAuthToken, + getCloudHarnessJSON, + getCloudMetadataFromProcessEnv, + getMetadataFromProcessEnv, + queryLogs, + queryAppInsights, + SERVICE_TYPE +}; + +/** + * @typedef AzureAppInsightsMetadata + * @type {Object} + * @property {string} apiKey + * @property {string} appID + * @property {string} instrKey + * @property {string} region + */ +/** + * @typedef AzureAppInsightsCloudMetadata + * @type {AzureLAMetadata} + * @property {string} cloudType + * @property {string} [instrKey] + * @property {string} [region] + */ +/** + * @typedef AzureLAMetadata + * @type {Object} + * @property {string} clientID + * @property {string} logKey + * @property {string} passphrase + * @property {string} tenant + * @property {string} workspace + */ +/** + * @typedef AzureLACloudMetadata + * @type {AzureLAMetadata} + * @property {string} cloudType + * @property {string} [passphrase] + */ diff --git a/test/functional/shared/cloudUtils/gcp.js b/test/functional/shared/cloudUtils/gcp.js new file mode 100644 index 00000000..b94f49f9 --- /dev/null +++ b/test/functional/shared/cloudUtils/gcp.js @@ -0,0 +1,153 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. 
Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const jwt = require('jsonwebtoken'); + +const constants = require('../constants'); +const logger = require('../utils/logger').getChild('gcpUtils'); +const miscUtils = require('../utils/misc'); +const request = require('../utils/request'); + +/** + * @module test/functional/shared/cloudUtils/gcp + */ + +const GCP = { + PUBLIC: { + login: 'oauth2.googleapis.com', + monitoring: 'monitoring.googleapis.com', + scope: 'https://www.googleapis.com/auth/monitoring' + } +}; + +/** + * Get GCP constants by cloud type + * + * @private + * + * @param {string} cloudType + * + * @returns {Object} + */ +function getGCP() { + return GCP.PUBLIC; +} + +/** + * Get GCP API metadata from process env + * + * @public + * + * @returns {Promise} + */ +function getMetadataFromProcessEnv() { + return new Promise((resolve, reject) => { + const envVar = constants.ENV_VARS.GCP.CM_API_DATA; + logger.info('Reading GCP Cloud Monitoring API data env variable', { envVar }); + + const apiDataFilePath = miscUtils.getEnvArg(envVar); + logger.info('Reading GCP Cloud Monitoring API data from file', { apiDataFilePath }); + + miscUtils.readJsonFile(apiDataFilePath, true) + .then(resolve, reject); + }); +} + +/** + * Get auth token + * + * @public + * + * @param {string} serviceEmail + * @param {string} privateKey + * @param {string} privateKeyID + * @param {'GOV' | 'PUBLIC'} [cloudType = 'PUBLIC'] + * + * @returns {Promise} resolved with access token + */ +function getOAuthToken(serviceEmail, privateKey, privateKeyID, cloudType) { + const gcp = getGCP(cloudType); + const port = 443; + const protocol = 'https'; + const uri = '/token'; + const newJwt = jwt.sign( + { + iss: serviceEmail, + scope: gcp.scope, + aud: 
`${protocol}://${gcp.login}${uri}`, + exp: Math.floor(Date.now() / 1000) + 3600, + iat: Math.floor(Date.now() / 1000) + }, + privateKey, + { + algorithm: 'RS256', + header: { + kid: privateKeyID, + typ: 'JWT', + alg: 'RS256' + } + } + ); + return request({ + form: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: newJwt + }, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded' + }, + host: gcp.login, + method: 'POST', + port, + protocol, + uri: '/token' + }) + .then((response) => response.access_token); +} + +/** + * Query Google Cloud Monitoring + * + * @public + * + * @param {string} accessToken + * @param {string} projectID + * @param {string} query + * @param {'GOV' | 'PUBLIC'} [cloudType = 'PUBLIC'] + * + * @returns {Promise} query results + */ +function queryCloudMonitoring(accessToken, projectID, query, cloudType) { + const gcp = getGCP(cloudType); + return request({ + headers: { + Authorization: `Bearer ${accessToken}` + }, + host: gcp.monitoring, + port: 443, + protocol: 'https', + uri: `/v3/projects/${projectID}/timeSeries?${query}` + }); +} + +module.exports = { + getOAuthToken, + getMetadataFromProcessEnv, + queryCloudMonitoring +}; + +/** + * @typedef GCPMetadata + * @type {Object} + * @property {string} privateKey + * @property {string} privateKeyID + * @property {string} projectID + * @property {string} serviceEmail + */ diff --git a/test/functional/shared/connectors/dockerConnector.js b/test/functional/shared/connectors/dockerConnector.js new file mode 100644 index 00000000..0d59d7d6 --- /dev/null +++ b/test/functional/shared/connectors/dockerConnector.js @@ -0,0 +1,375 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. 
+ * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const getArgByType = require('../utils/misc').getArgByType; + +/** + * @module test/functional/shared/connectors/dockerConnector + * + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("../remoteHost/sshConnector").SSHConnector} SSHConnector + * @typedef {import("../remoteHost/sshConnector").SSHExecResponse} SSHExecResponse + */ + +const DOCKER_SHORT_ID_LEN = 12; + +const RUN_OPTS_MAP = { + // boolean + detach: (val) => (val ? '-d' : ''), + // object + env: (envVars) => Object.keys(envVars) + .map((varName) => { + const varVal = envVars[varName]; + return varVal === null + ? `--env ${varName}` + : `--env ${varName}=${varVal}`; + }) + .join(' '), + // string + name: (val) => `--name=${val}`, + // object + publish: (ports) => Object.keys(ports) + .map((hostPort) => `-p ${hostPort}:${ports[hostPort]}`) + .join(' '), + // string + restart: (val) => `--restart=${val}`, + // object + volume: (volumes) => Object.keys(volumes) + .map((hostVol) => `-v ${hostVol}:${volumes[hostVol]}`) + .join(' ') +}; + +/** + * Truncate container ID to N chars + * + * @param {string} containerID - container ID + * + * @returns {string} truncated container ID + */ +function truncateID(containerID, numOfChars) { + return containerID.slice(0, arguments.length > 1 ? 
numOfChars : DOCKER_SHORT_ID_LEN); +} + +/** + * Docker connector + */ +class DockerConnector { + /** + * Constructor + * + * @param {SSHConnector} ssh - SSH Connector + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + */ + constructor(ssh, options) { + Object.defineProperties(this, { + ssh: { + value: ssh + } + }); + + options = options || {}; + this.logger = (options.logger || this.ssh.logger).getChild('docker'); + } + + /** + * Build command for 'docker run' + * + * @param {DockerCommandOptions} command - command options + * + * @returns {string} command + */ + buildRunCmd(command) { + const parts = []; + Object.keys(command).forEach((opt) => { + if (typeof RUN_OPTS_MAP[opt] !== 'undefined') { + parts.push(RUN_OPTS_MAP[opt](command[opt])); + } + }); + return parts.join(' '); + } + + /** + * List running containers + * + * @param {boolean} all - list all containers + * + * @returns {Promise>} resolved with list of container info + */ + containers(all) { + this.logger.info('Request to list containers', { all }); + all = all ? ' -a' : ''; + return this.exec(`container list --format "{{json .}}" -q${all}`) + .then((ret) => (ret.stdout + ? 
ret.stdout + .split('\n') + .filter((id) => id.trim()) + .map(JSON.parse) + .map((containerInfo) => ({ + id: containerInfo.ID, + name: containerInfo.Names + })) + : [])); + } + + /** + * Get log for container + * + * @param {string} container - container ID or name + * + * @returns {Promise} resolved with logs + */ + containerLogs(container) { + this.logger.info('Request to get container logs', { container }); + return this.exec(`container logs ${container}`); + } + + /** + * Execute docker command + * + * @param {string} command - command to execute + * @param {PromiseRetryOptions} [retryOptions] - retry options + * @param {boolean} [rejectOnError = true] - reject on non-zero RC + * + * @returns {Promise} execution results + */ + exec(command, retryOptions, rejectOnError) { + rejectOnError = true; + if (arguments.length > 1) { + rejectOnError = getArgByType(arguments, 'boolean', { fromIndex: 1, defaultValue: rejectOnError }).value; + retryOptions = getArgByType(arguments, 'object', { fromIndex: 1 }).value; + } + return this.ssh.exec(`docker ${command}`, null, retryOptions) + .then((ret) => { + if (ret.code !== 0) { + this.logger.info('Non-zero return code', ret); + if (rejectOnError) { + const execErr = new Error('Docker command execution error: non-zero return code!'); + execErr.ret = ret; + return Promise.reject(execErr); + } + } + return Promise.resolve(ret); + }); + } + + /** + * List images + * + * @param {boolean} all - list all images + * + * @returns {Promise>} resolved with list of image info + */ + images(all) { + this.logger.info('Request to list images', { all }); + all = all ? ' -a' : ''; + return this.exec(`images --format "{{json .}}" -q${all}`) + .then((ret) => (ret.stdout + ? 
ret.stdout + .split('\n') + .filter((id) => id.trim()) + .map(JSON.parse) + .map((imageInfo) => ({ + id: imageInfo.ID, + repo: imageInfo.Repository, + tag: imageInfo.Tag + })) + : [])); + } + + /** + * Install docker + * + * @returns {Promise} resolved once installed + */ + install() { + this.logger.info('Request to install docker'); + return this.ssh.exec('curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh') + .then((ret) => (ret.code === 0 + ? Promise.resolve() + : Promise.reject(new Error('Unable to install docker')))); + } + + /** + * Check if docker installed + * + * @returns {Promise} true when installed + */ + installed() { + return this.exec('docker --version', false) + .then((ret) => ret.code === 0); + } + + /** + * Prune all systems + * + * @returns {Promise} resolved once all systems pruned + */ + pruneSystems() { + this.logger.info('Request to prune all systems'); + return this.exec('system prune -f'); + } + + /** + * Prune all volumes + * + * @returns {Promise} resolved once all volumes pruned + */ + pruneVolumes() { + this.logger.info('Request to prune all volumes'); + return this.exec('volume prune -f'); + } + + /** + * Pull image + * + * @param {string} image - image + * + * @returns {Promise} resolve once image pulled + */ + pull(image) { + this.logger.info('Request to pull an image'); + return this.exec(`pull ${image}`); + } + + /** + * Remove all systems, volumes, containers, images + * + * @returns {Promise} resolved once all actions done + */ + removeAll() { + this.logger.info('Request to cleanup all components of docker'); + return this.removeAllContainers() + .then(() => this.removeAllImages()) + .then(() => this.pruneSystems()) + .then(() => this.pruneVolumes()); + } + + /** + * Remove all containers + * + * @returns {Promise} resolved once all containers removed + */ + removeAllContainers() { + this.logger.info('Request to remove all containers'); + return this.containers(true) + .then((containers) => (containers.length 
+ ? this.exec(`rm -f ${containers.map((c) => c.id).join(' ')}`) + : Promise.resolve(null))); + } + + /** + * Remove all images + * + * @returns {Promise} resolved once all images removed + */ + removeAllImages() { + this.logger.info('Request to remove all images'); + return this.images(true) + .then((images) => (images.length + ? this.exec(`rmi -f ${images.map((im) => im.id).join(' ')}`) + : Promise.resolve(null))); + } + + /** + * Remove container(s) + * + * @param {Array | string} containers - containers to remove + * + * @returns {Promise} resolved once container(s) removed + */ + removeContainer(containers) { + this.logger.info('Request to remove container(s)', { containers }); + containers = Array.isArray(containers) + ? containers + : [containers]; + + return this.exec(`container rm -v -f ${containers.join(' ')}`); + } + + /** + * Run a container + * + * @param {DockerCommandOptions} command - command options + * @param {string} command.image - image name + * @param {string} [command.command] - command to run + * @param {Array} [command.args] - args to pass to command + * @param {boolean} [rejectOnError = true] - reject on non-zero RC + * + * @returns {Promise} resolved once operation completed. Returns a container ID + * when container was run in detached mode. 
+ */ + run(command, rejectOnError) { + rejectOnError = true; + if (arguments.length > 1) { + rejectOnError = getArgByType(arguments, 'boolean', { fromIndex: 1, defaultValue: rejectOnError }).value; + } + + this.logger.info('Request to run a container'); + const cmd = [ + this.buildRunCmd(command), + command.image, + command.command + ] + .filter((c) => c) + .concat(command.args || []) + .join(' '); + + return this.exec(`run ${cmd}`, rejectOnError) + .then((ret) => { + if (command.detach && ret.code === 0) { + return Promise.resolve(truncateID(ret.stdout.split('\n')[0])); + } + return Promise.resolve(ret); + }); + } + + /** + * Stop container(s) + * + * @param {Array | string} containers - containers to stop + * + * @returns {Promise} resolved once container(s) stopped + */ + stopContainer(containers) { + this.logger.info('Request to stop container(s)', { containers }); + containers = Array.isArray(containers) + ? containers + : [containers]; + + return this.exec(`container stop ${containers.join(' ')}`); + } + + /** + * Get docker version + * + * @returns {Promise} resolved with version info + */ + version() { + this.logger.info('Request to get version info'); + return this.exec('docker --version') + .then((ret) => ret.stdout); + } +} + +module.exports = { + DockerConnector +}; + +/** + * @typedef DockerCommandOptions + * @type {Object} + * @property {boolean} [detach] - run container in background and print container ID + * @property {Object} [env] - set environment variables + * @property {string} [name] - assign a name to the container + * @property {Object} [volume] - bind mount a volume + */ diff --git a/test/functional/shared/connectors/telemetryConnector.js b/test/functional/shared/connectors/telemetryConnector.js new file mode 100644 index 00000000..de6ad56c --- /dev/null +++ b/test/functional/shared/connectors/telemetryConnector.js @@ -0,0 +1,255 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. 
Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const DEFAULT_NAMESPACE = require('../constants').TELEMETRY.NAMESPACE.DEFAULT; +const getArgByType = require('../utils/misc').getArgByType; +const URI_PREFIX = require('../constants').TELEMETRY.API.URI.PREFIX; + +/** + * @module test/functional/shared/connectors/telemetryConnector + * + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("../remoteHost/icontrolConnector").IControlConnector} IControlConnector + */ + +const PRIVATES = new WeakMap(); + +/** + * Telemetry Streaming Connector + * + * @property {boolean} forceNamespace - force namespace usage even when default one + * @property {IControlConnector} icontrol - iControl Connector + * @property {string} namespace - Telemetry Streaming namespace + */ +class TelemetryStreamingConnector { + /** + * Constructor + * + * @param {IControlConnector} icontrol - iControl Connector + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + * @param {string} [options.namespace] - Telemetry Streaming namespace + */ + constructor(icontrol, options) { + Object.defineProperties(this, { + forceNamespace: { + value: !!options.forceNamespace || false + }, + icontrol: { + value: icontrol + }, + namespace: { + value: options.namespace || DEFAULT_NAMESPACE + } + }); + + options = options || {}; + this.logger = (options.logger || this.icontrol.logger).getChild(`f5-telemetry@${this.namespace}`); + + PRIVATES.set(this, { + parentLogger: options.logger || this.icontrol.logger + }); + } + + /** + * Build URI for request + * + * @param {string} uri - URI + * @param {boolean} [ignoreNamespace = false] - ignore configured namespace + * + * @returns {string} URI + */ + 
buildURI(uri, ignoreNamespace) { + uri = uri[0] === '/' ? uri : `/${uri}`; + if (!ignoreNamespace && (this.forceNamespace || this.namespace !== DEFAULT_NAMESPACE)) { + uri = `/namespace/${this.namespace}${uri}`; + } + return `${URI_PREFIX}${uri}`; + } + + /** + * Post declaration + * + * @param {Object} declaration - declaration + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved once declaration posted + */ + declare(declaration, retry) { + return this.icontrol.makeRequestWithAuth({ + body: declaration, + json: true, + method: 'POST', + retry, + uri: this.buildURI('declare') + }); + } + + /** + * Get declaration + * + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved once declaration received + */ + getDeclaration(retry) { + return this.icontrol.makeRequestWithAuth({ + method: 'GET', + retry, + uri: this.buildURI('declare') + }); + } + + /** + * Get iHealth Data + * + * @param {string} [system] - system + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved once data received + */ + getIHealthData(system, retry) { + system = ''; + if (arguments.length > 0) { + system = getArgByType(arguments, 'string', { defaultValue: system }).value; + retry = getArgByType(arguments, 'object').value; + } + system = system ? 
`/${system}` : ''; + return this.icontrol.makeRequestWithAuth({ + method: 'GET', + retry, + uri: this.buildURI(`ihealthpoller${system}`) + }); + } + + /** + * Get data from pull consumer + * + * @param {string} consumer - consumer + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise>} resolved once data received + */ + getPullConsumerData(consumer, retry) { + return this.icontrol.makeRequestWithAuth({ + includeResponseObject: true, + method: 'GET', + retry, + uri: this.buildURI(`pullconsumer/${consumer}`) + }); + } + + /** + * Get data from system poller + * + * @param {string} system - system + * @param {string} [poller] - poller + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved once data received + */ + getSystemPollerData(system, poller, retry) { + poller = ''; + if (arguments.length > 1) { + poller = getArgByType(arguments, 'string', { fromIndex: 1, defaultValue: poller }).value; + retry = getArgByType(arguments, 'object', { fromIndex: 1 }).value; + } + poller = poller ? 
`/${poller}` : ''; + return this.icontrol.makeRequestWithAuth({ + method: 'GET', + retry, + uri: this.buildURI(`systempoller/${system}${poller}`) + }); + } + + /** + * Check if f5-telemetry installed on BIG-IP + * + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved with true when installed + */ + installed(retry) { + this.logger.info('Request to check f5-telemetry installation'); + return this.icontrol.makeRequestWithAuth({ + continueOnErrorCode: true, + expectedResponseCode: [200], + includeResponseObject: true, + method: 'GET', + retry, + uri: this.buildURI('info', true) + }) + .then((res) => res[1].statusCode === 200); + } + + /** + * Send test event to Event Listener + * + * @param {any} data - data + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved once data posted + */ + sendEvent(data, retry) { + return this.icontrol.makeRequestWithAuth({ + body: data, + json: typeof data !== 'string', + method: 'POST', + retry, + uri: this.buildURI('eventlistener') + }); + } + + /** + * Get new instance of TelemetryStreamingConnector for 'namespace' + * + * @param {string} namespace - namespace + * @param {boolean} [force = false] - force namespace usage even when default one + * + * @returns {TelemetryStreamingConnector} instance + */ + toNamespace(namespace, force) { + return new TelemetryStreamingConnector(this.icontrol, { + forceNamespace: force, + logger: PRIVATES.get(this).parentLogger, + namespace + }); + } + + /** + * Get application version + * + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved with version info + */ + version(retry) { + this.logger.info('Request to get f5-telemetry version '); + return this.icontrol.makeRequestWithAuth({ + method: 'GET', + retry, + uri: this.buildURI('info', true) + }); + } +} + +module.exports = { + TelemetryStreamingConnector +}; + +/** + * @typedef TSInfo + * @type {Object} + * @property {string} 
nodeVersion - node.js version on a host + * @property {string} version - Telemetry Streaming version + * @property {string} release - Telemetry Streaming release number + * @property {string} schemaCurrent - Telemetry Streaming schema current version + * @property {string} schemaMinimum - Telemetry Streaming schema minimum supported version + */ diff --git a/test/functional/shared/constants.js b/test/functional/shared/constants.js index af82cbba..0e086514 100644 --- a/test/functional/shared/constants.js +++ b/test/functional/shared/constants.js @@ -10,42 +10,27 @@ const fs = require('fs'); +/** + * @module test/functional/shared/constants + */ + +/** + * Add trailing char to the string if not set + * + * @param {string} str + * @param {string} char + * + * @returns {string} + */ +function trailingChar(str, char) { + if (!str.endsWith(char)) { + str = `${str}${char}`; + } + return str; +} + module.exports = { - DECL: { - BASIC: `${__dirname}/basic.json`, - BASIC_NAMESPACE: `${__dirname}/basic_namespace.json`, - FILTER: `${__dirname}/filter_system_poller.json`, - ACTION_CHAINING: `${__dirname}/system_poller_chained_actions.json`, - FILTERING_WITH_MATCHING: `${__dirname}/system_poller_matched_filtering.json`, - ENDPOINTLIST: `${__dirname}/system_poller_endpointlist.json`, - PULL_CONSUMER_BASIC: `${__dirname}/pull_consumer_basic.json`, - PULL_CONSUMER_WITH_NAMESPACE: `${__dirname}/pull_consumer_with_namespace.json`, - CONSUMER_NAME: 'My_Consumer', - SYSTEM_NAME: 'My_System', - NAMESPACE_NAME: 'My_Namespace', - SYSTEM_POLLER_SCHEMA: fs.realpathSync(`${__dirname}/../../../shared/output_schemas/system_poller_schema.json`) - }, ENV_VARS: { - ARTIFACTORY_SERVER: 'ARTIFACTORY_SERVER', - TEST_HARNESS: { - FILE: 'TEST_HARNESS_FILE', - IP: 'TEST_HOSTS', - USER: 'TEST_HOSTS_USER', - PWD: 'TEST_HOSTS_PWD' - }, - CONSUMER_HARNESS: { - FILE: 'TEST_HARNESS_FILE', - IP: 'CONSUMER_HOSTS', - USER: 'CONSUMER_HOSTS_USER', - PWD: 'CONSUMER_HOSTS_PWD', - TYPE_REGEX: 'CONSUMER_TYPE_REGEX' - 
}, - PULL_CONSUMER_HARNESS: { - TYPE_REGEX: 'PULL_CONSUMER_TYPE_REGEX' - }, - CLOUD: { - FILE: 'CLOUD_ENV_FILE' - }, AZURE: { WORKSPACE_MI: 'AZURE_WORKSPACE_MI', WORKSPACE: 'AZURE_WORKSPACE', @@ -58,49 +43,112 @@ module.exports = { VM_IP: 'AZURE_VM_IP', VM_USER: 'AZURE_VM_USER', VM_PWD: 'AZURE_VM_PWD', - APPINS_API_DATA: 'AZURE_APPINS_API', APPINS_API_KEY: 'AZURE_APPINS_API_KEY', APPINS_APP_ID: 'AZURE_APPINS_APP_ID', - CLOUD_TYPE: 'AZURE_CLOUD_TYPE' + CLOUD_TYPE: 'AZURE_CLOUD_TYPE', + LA_API_DATA: 'AZURE_LA_API', // value of env var should be path to a file + APPINS_API_DATA: 'AZURE_APPINS_API' // value of env var should be path to a file }, AWS: { - VM_PORT: 'AWS_VM_PORT', - VM_IP: 'AWS_VM_IP', - VM_USER: 'AWS_VM_USER', - VM_PWD: 'AWS_VM_PWD', - BUCKET: 'AWS_BUCKET', - REGION: 'AWS_REGION', ACCESS_KEY_ID: 'AWS_ACCESS_KEY_ID', ACCESS_KEY_SECRET: 'AWS_SECRET_ACCESS_KEY', + HARNESS_FILE: 'CLOUD_ENV_FILE', METRIC_NAMESPACE: 'AWS_METRIC_NAMESPACE' }, TEST_CONTROLS: { - REUSE_INSTALLED_PACKAGE: 'REUSE_INSTALLED_PACKAGE', - SKIP_DUT_TESTS: 'SKIP_DUT_TESTS', - SKIP_CONSUMER_TESTS: 'SKIP_CONSUMER_TESTS', - SKIP_PULL_CONSUMER_TESTS: 'SKIP_PULL_CONSUMER_TESTS' + CONSUMER: { + EXCLUDE: 'CONSUMER_EXCLUDE_REGEX', + INCLUDE: 'CONSUMER_INCLUDE_REGEX' + }, + DUT: { + EXCLUDE: 'DUT_EXCLUDE_REGEX', + INCLUDE: 'DUT_INCLUDE_REGEX' + }, + HARNESS: { + FILE: 'TEST_HARNESS_FILE' + }, + TESTS: { + SKIP_CONSUMER_TESTS: 'SKIP_CONSUMER_TESTS', + SKIP_DUT_SETUP: 'SKIP_DUT_SETUP', + SKIP_DUT_TEARDOWN: 'SKIP_DUT_TEARDOWN', + SKIP_DUT_TESTS: 'SKIP_DUT_TESTS' + } }, F5_CLOUD: { - SERVICE_ACCOUNT: 'F5_CLOUD_GCP_SERVICE_ACCOUNT' + SERVICE_ACCOUNT: 'F5_CLOUD_GCP_SERVICE_ACCOUNT' // value of env var should be path to a file }, GCP: { - PROJECT_ID: 'GCP_PROJECT_ID', - PRIVATE_KEY_ID: 'GCP_PRIVATE_KEY_ID', - PRIVATE_KEY: 'GCP_PRIVATE_KEY', - SERVICE_EMAIL: 'GCP_SERVICE_EMAIL' + CM_API_DATA: 'GCP_CM_API_DATA' // value of env var should be path to a file } }, - EVENT_LISTENER_DEFAULT_PORT: 6514, // 
default port - EVENT_LISTENER_SECONDARY_PORT: 56515, - EVENT_LISTENER_NAMESPACE_PORT: 56516, - EVENT_LISTENER_NAMESPACE_SECONDARY_PORT: 56517, - REQUEST: { - PORT: 443, - PROTOCOL: 'https' + ARTIFACTORY_DOCKER_HUB_PREFIX: process.env.ARTIFACTORY_DOCKER_HUB ? trailingChar(process.env.ARTIFACTORY_DOCKER_HUB, '/') : '', + ARTIFACTS_DIR: `${__dirname}/../../artifacts`, + BIGIP: { + RESTNODED: { + LOGS_DIR: '/var/log/restnoded' + } + }, + CLOUD: { + AWS: { + BIGIP: { + SSH: { + DEFAULT_PORT: 2222 + } + } + }, + AZURE: { + BIGIP: { + REST_API: { + DEFAULT_PORT: 8443, + DEFAULT_USER: 'admin' + }, + SSH: { + DEFAULT_PORT: 2222 + } + } + } }, CONSUMERS_DIR: `${__dirname}/../consumersTests`, - PULL_CONSUMERS_DIR: `${__dirname}/../pullConsumersTests`, - ARTIFACTS_DIR: `${__dirname}/../../artifacts`, - BASE_ILX_URI: '/mgmt/shared/telemetry', - ARTIFACTORY_DOCKER_HUB_PREFIX: process.env.ARTIFACTORY_DOCKER_HUB ? `${process.env.ARTIFACTORY_DOCKER_HUB}/` : '' + DECL: { + ACTION_CHAINING: `${__dirname}/data/declarations/system_poller_chained_actions.json`, + BASIC: `${__dirname}/data/declarations/basic.json`, + BASIC_NAMESPACE: `${__dirname}/data/declarations/basic_namespace.json`, + CONSUMER_NAME: 'My_Consumer', + ENDPOINTLIST: `${__dirname}/data/declarations/system_poller_endpointlist.json`, + FILTER: `${__dirname}/data/declarations/filter_system_poller.json`, + FILTERING_WITH_MATCHING: `${__dirname}/data/declarations/system_poller_matched_filtering.json`, + NAMESPACE_NAME: 'My_Namespace', + PULL_CONSUMER_BASIC: `${__dirname}/data/declarations/pull_consumer_basic.json`, + PULL_CONSUMER_WITH_NAMESPACE: `${__dirname}/data/declarations/pull_consumer_with_namespace.json`, + SNMP_METRICS: `${__dirname}/data/declarations/system_poller_snmp_metrics.json`, + SYSTEM_NAME: 'My_System', + SYSTEM_POLLER_SCHEMA: fs.realpathSync(`${__dirname}/../../../shared/output_schemas/system_poller_schema.json`) + }, + HTTP_REQUEST: { + PORT: 443, + PROTOCOL: 'https', + STRICT_SSL: true + }, + TELEMETRY: 
{ + API: { + URI: { + PREFIX: '/mgmt/shared/telemetry' + } + }, + LISTENER: { + PORT: { + DEFAULT: 6514, + NAMESPACE: 56516, + NAMESPACE_SECONDARY: 56517, + SECONDARY: 56515 + }, + PROTOCOLS: [ + 'tcp', + 'udp' + ] + }, + NAMESPACE: { + DEFAULT: 'f5telemetry_default' + } + } }; diff --git a/test/functional/shared/basic.json b/test/functional/shared/data/declarations/basic.json similarity index 100% rename from test/functional/shared/basic.json rename to test/functional/shared/data/declarations/basic.json diff --git a/test/functional/shared/basic_namespace.json b/test/functional/shared/data/declarations/basic_namespace.json similarity index 100% rename from test/functional/shared/basic_namespace.json rename to test/functional/shared/data/declarations/basic_namespace.json diff --git a/test/functional/shared/filter_system_poller.json b/test/functional/shared/data/declarations/filter_system_poller.json similarity index 100% rename from test/functional/shared/filter_system_poller.json rename to test/functional/shared/data/declarations/filter_system_poller.json diff --git a/test/functional/shared/pull_consumer_basic.json b/test/functional/shared/data/declarations/pull_consumer_basic.json similarity index 100% rename from test/functional/shared/pull_consumer_basic.json rename to test/functional/shared/data/declarations/pull_consumer_basic.json diff --git a/test/functional/shared/pull_consumer_with_namespace.json b/test/functional/shared/data/declarations/pull_consumer_with_namespace.json similarity index 100% rename from test/functional/shared/pull_consumer_with_namespace.json rename to test/functional/shared/data/declarations/pull_consumer_with_namespace.json diff --git a/test/functional/shared/system_poller_chained_actions.json b/test/functional/shared/data/declarations/system_poller_chained_actions.json similarity index 100% rename from test/functional/shared/system_poller_chained_actions.json rename to 
test/functional/shared/data/declarations/system_poller_chained_actions.json diff --git a/test/functional/shared/system_poller_endpointlist.json b/test/functional/shared/data/declarations/system_poller_endpointlist.json similarity index 100% rename from test/functional/shared/system_poller_endpointlist.json rename to test/functional/shared/data/declarations/system_poller_endpointlist.json diff --git a/test/functional/shared/system_poller_matched_filtering.json b/test/functional/shared/data/declarations/system_poller_matched_filtering.json similarity index 100% rename from test/functional/shared/system_poller_matched_filtering.json rename to test/functional/shared/data/declarations/system_poller_matched_filtering.json diff --git a/test/functional/shared/data/declarations/system_poller_snmp_metrics.json b/test/functional/shared/data/declarations/system_poller_snmp_metrics.json new file mode 100644 index 00000000..7dd552e8 --- /dev/null +++ b/test/functional/shared/data/declarations/system_poller_snmp_metrics.json @@ -0,0 +1,38 @@ +{ + "class": "Telemetry", + "controls": { + "class": "Controls", + "logLevel": "debug", + "debug": true + }, + "SNMP_Endpoints": { + "class": "Telemetry_Endpoints", + "items": { + "hrDeviceStatusAsMetric": { + "protocol": "snmp", + "path": "hrDeviceStatus.196608", + "numericalEnums": true + }, + "hrDeviceStatusOrigin": { + "protocol": "snmp", + "path": "hrDeviceStatus.196608" + }, + "hrDeviceStatusOriginWithOptions": { + "protocol": "snmp", + "path": "hrDeviceStatus.196608", + "numericalEnums": false + } + } + }, + "My_System": { + "class": "Telemetry_System", + "systemPoller": [ + "My_System_Poller" + ] + }, + "My_System_Poller": { + "class": "Telemetry_System_Poller", + "interval": 60, + "endpointList": "SNMP_Endpoints" + } +} \ No newline at end of file diff --git a/test/functional/shared/harness.js b/test/functional/shared/harness.js new file mode 100644 index 00000000..641f44d8 --- /dev/null +++ b/test/functional/shared/harness.js @@ 
-0,0 +1,446 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const fs = require('fs'); + +const DockerConnector = require('./connectors/dockerConnector').DockerConnector; +const DUT_VARS = require('./constants').ENV_VARS.TEST_CONTROLS.DUT; +const HARNESS_VARS = require('./constants').ENV_VARS.TEST_CONTROLS.HARNESS; +const logger = require('./utils/logger').getChild('harness'); +const miscUtils = require('./utils/misc'); +const promiseUtils = require('./utils/promise'); +const remoteHost = require('./remoteHost'); +const TelemetryStreamingConnector = require('./connectors/telemetryConnector').TelemetryStreamingConnector; + +/** + * @module test/functional/shared/harness + * + * @typedef {import("./remoteHost/appLXConnector").AppLXConnector} AppLXConnector + * @typedef {import("./connectors/dockerConnector").DockerCommandOptions} DockerCommandOptions + * @typedef {import("./utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost/httpConnector").HTTPConnector} HTTPConnector + * @typedef {import("./remoteHost/httpConnector").HTTPConnectorManager} HTTPConnectorManager + * @typedef {import("./remoteHost/icontrolAPI.js").IControlAPI} IControlAPI + * @typedef {import("./remoteHost/icontrolConnector").IControlConnector} IControlConnector + * @typedef {import("./remoteHost/sshConnector").SSHConnector} SSHConnector + * @typedef {import("./remoteHost/sshConnector").SSHExecResponse} SSHExecResponse + */ + +const DEFAULTS = Object.freeze({ + ICONTROL: Object.freeze({ + allowSelfSignedCert: false, + port: 443 + }), + SSH: Object.freeze({ + 
port: 22 + }) +}); + +let DEFAULT_HARNESS; + +/** + * F5 BIG-IP Device + * + * @property {AppLXConnector} appLX.default - default App LX Connector + * @property {HTTPConnector} http.icontrol - iControl HTTP Connector + * @property {IControlAPI} icAPI.default - default iControl API Connector + * @property {IControlConnector} icontrol.default - default iControl Connector + * @property {SSHConnector} ssh.default - default SSH Connector + * @property {TelemetryStreamingConnector} telemetry - Telemetry Streaming Connector + */ +class BigIp extends remoteHost.F5BigDevice { + /** + * Constructor + * + * @param {string} name - name + * @param {string} host - remote host + * @param {Object} options - connection options + * @param {IControlConfig} options.icontrol - iControl connection options + * @param {string} options.hostname - hostname + * @param {SSHConfig} options.ssh - SSH connection options + */ + constructor(name, host, options) { + super(host); + + Object.defineProperties(this, { + logger: { + value: this.host.logger.getChild('f5bigip').getChild(name) + }, + hostname: { + value: options.hostname + }, + name: { + value: name + } + }); + + const icOpts = assignDefaults( + Object.assign({}, options.icontrol), + DEFAULTS.ICONTROL + ); + const sshOpts = assignDefaults( + Object.assign({}, options.ssh || {}), + DEFAULTS.SSH + ); + + this.initHTTPStack() + .createAndSave('icontrol', { + allowSelfSignedCert: icOpts.allowSelfSignedCert, + port: icOpts.port + }); + + this.initIControlStack() + .createAndSave('default', { + passphrase: icOpts.passphrase, + transport: this.http.icontrol, + username: icOpts.username + }); + + this.initAppLXStack() + .createAndSave('default', { + icontrol: this.icontrol.default + }); + + this.initIControlAPIStack() + .createAndSave('default', { + icontrol: this.icontrol.default + }); + + this.initSSHStack() + .createAndSave('default', { + encoding: 'utf8', + password: sshOpts.passphrase, + port: sshOpts.port, + privateKey: sshOpts.privateKey, + 
username: sshOpts.username + }); + + this.initTCPStack({ + unref: true + }); + this.initUDPStack(); + + Object.defineProperties(this, { + telemetry: { + value: new TelemetryStreamingConnector( + this.icontrol.default, + { + logger: this.logger + } + ) + } + }); + } + + /** + * Teardown all connectors + * + * @returns {Promise} resolved once all connectors closed + */ + teardown() { + return promiseUtils.allSettled([ + this.ssh.default.terminate() + ]); + } +} + +/** + * Remote Server + * + * @property {SSHConnector} ssh.default - default SSH Connector + * @property {DockerConnector} docker - Docker Connector + */ +class RemoteServer extends remoteHost.RemoteDevice { + /** + * Constructor + * + * @param {string} name - name + * @param {string} host - remote host + * @param {Object} options - connection options + * @param {SSHConfig} options.ssh - SSH connection options + */ + constructor(name, host, options) { + super(host); + + Object.defineProperties(this, { + logger: { + value: this.host.logger.getChild('server').getChild(name) + }, + name: { + value: name + } + }); + + const sshOpts = assignDefaults( + Object.assign({}, options.ssh || {}), + { + port: DEFAULTS.SSH_PORT + } + ); + + this.initHTTPStack(); + this.initSSHStack() + .createAndSave('default', { + encoding: 'utf8', + password: sshOpts.passphrase, + port: sshOpts.port, + privateKey: sshOpts.privateKey, + username: sshOpts.username + }); + + Object.defineProperties(this, { + docker: { + value: new DockerConnector( + this.ssh.default, + { + logger: this.logger + } + ) + } + }); + } + + /** + * Teardown all connectors + * + * @returns {Promise} resolved once all connectors closed + */ + teardown() { + return promiseUtils.allSettled([ + this.ssh.default.terminate() + ]); + } +} + +/** + * Get default harness + * + * @returns {Harness} harness + */ +function getDefaultHarness() { + return DEFAULT_HARNESS; +} + +/** + * Initialize harness using info from env vars + * + * @returns {Harness} harness + */ 
+function initializeFromEnv() { + logger.info('Initializing harness using ENV variables'); + + const harnessFilePath = miscUtils.getEnvArg(HARNESS_VARS.FILE); + logger.info('Trying to read data from file', { + envVar: HARNESS_VARS.FILE, + path: harnessFilePath + }); + + if (fs.existsSync(harnessFilePath)) { + let data = fs.readFileSync(harnessFilePath); + data = JSON.parse(data); + + logger.info(`Harness parsed from file! ${data.length} objects in it`); + return initializeFromJSON(data); + } + throw new Error('Unable to initialize harness from env vars: not enough data'); +} + +/** + * Initialize harness from harness file + * + * @param {Array} harness - harness data + * + * @returns {Harness} harness + */ +function initializeFromJSON(harness) { + logger.info('Parsing harness JSON data'); + + const dutIgnorePattern = miscUtils.getEnvArg(DUT_VARS.EXCLUDE, { defaultValue: '' }); + const dutIncludePattern = miscUtils.getEnvArg(DUT_VARS.INCLUDE, { defaultValue: '' }); + + let bigipFilter; + if (dutIgnorePattern || dutIncludePattern) { + logger.info('Filtering BIG-IP by hostname using following patterns', { + ignore: dutIgnorePattern, + include: dutIncludePattern + }); + + let ignoreFilter = () => true; // accept by default + if (dutIgnorePattern) { + const regex = new RegExp(dutIgnorePattern, 'i'); + ignoreFilter = (hostname) => !hostname.match(regex); + } + let includeFilter = () => true; // accept by default + if (dutIncludePattern) { + const regex = new RegExp(dutIncludePattern, 'i'); + includeFilter = (hostname) => hostname.match(regex); + } + bigipFilter = (hostname) => includeFilter(hostname) && ignoreFilter(hostname); + } + + const ret = { + bigip: [], + other: [] + }; + harness.forEach((item, idx) => { + if (item.is_f5_device) { + if (bigipFilter && !bigipFilter(item.f5_hostname)) { + logger.warning('Ignoring F5 Device with hostname', { hostname: item.f5_hostname }); + } else { + ret.bigip.push(new BigIp( + (item.f5_hostname && item.f5_hostname.indexOf('bigip') 
< item.f5_hostname.indexOf('.')) + ? item.f5_hostname.substring(item.f5_hostname.indexOf('bigip'), item.f5_hostname.indexOf('.')) + : `bigip_${idx}`, + item.admin_ip, + { + icontrol: { + allowSelfSignedCert: item.f5_validate_certs === false, + username: item.f5_rest_user.username, + passphrase: item.f5_rest_user.password, + port: item.f5_rest_api_port + }, + hostname: item.f5_hostname, + ssh: { + username: item.ssh_user.username, + passphrase: item.ssh_user.password, + port: item.ssh_port + } + } + )); + } + } else { + ret.other.push(new RemoteServer( + item.hostname, + item.admin_ip, + { + ssh: { + username: item.ssh_user.username, + passphrase: item.ssh_user.password, + port: item.ssh_port + } + } + )); + } + }); + + logger.info('Harness parsed!', { + bigip: ret.bigip.length, + other: ret.other.length + }); + + return ret; +} + +/** + * Set harness as default + * + * @param {Harness} harness - harness + */ +function setDefaultHarness(harness) { + DEFAULT_HARNESS = harness; +} + +module.exports = { + BigIp, + RemoteServer, + + getDefaultHarness, + initializeFromEnv, + initializeFromJSON, + setDefaultHarness, + + /** + * Docker Helpers + */ + docker: { + /** + * Start new container + * + * @param {DockerConnector} docker - Docker Connector instance + * @param {DockerCommandOptions} options - Docker `run` options + * @param {boolean} options.detach - should be set to 'true' + * @param {string} options.name - container name + * + * @returns {Promise} resolved once container started + */ + startNewContainer(docker, options) { + docker.logger.info('Starting new container', { options }); + return docker.run(options) + .then((containerID) => promiseUtils.retry(() => { + docker.logger.info('Container started!', { containerID }); + return docker.containers() + .then((running) => { + if (running.find((ci) => ci.id === containerID)) { + docker.logger.info('Container running!', { containerID }); + return Promise.resolve(); + } + return Promise.reject(new Error(`Unable to find 
container "${containerID}" in running containers list!`)); + }); + }, { + maxTrues: 10, + delay: 100 + })); + }, + + /** + * Remove container(s) and verify it stopped + * + * @param {DockerConnector} docker - Docker Connector instance + * @param {Array | string} cidOrName - container ID(s) and/or name(s) + * + * @returns {Promise} resolved once container(s) stopped and removed + */ + stopAndRemoveContainer(docker, cidOrName) { + docker.logger.info('Stopping and removing FluentD container', { cidOrName }); + cidOrName = Array.isArray(cidOrName) + ? cidOrName + : [cidOrName]; + return promiseUtils.loopUntil( + (breakCb) => docker.containers(true) + .then((containers) => containers + .filter((ci) => cidOrName.indexOf(ci.name) !== -1 || cidOrName.indexOf(ci.id) !== -1)) + .then((toStopAndRemove) => { + if (toStopAndRemove.length === 0) { + return breakCb(); + } + return docker.removeContainer(toStopAndRemove.map((ci) => ci.id)) + .catch(() => { + docker.logger.info('Got error on attempt to stop and remove container(s). 
Going to re-try after 500ms.', { toStopAndRemove }); + return promiseUtils.sleep(500); + }); + }) + ); + } + } +}; + +/** + * @typedef IControlConfig + * @type {Object} + * @property {boolean} [allowSelfSignedCert = false] - allow self-signed certs + * @property {string} username - iControl username + * @property {string} passphrase - iControl passphrase + * @property {number} [port = 443] - iControl port + */ +/** + * @typedef SSHConfig + * @type {Object} + * @property {string} [username] - SSH username + * @property {string} [passphrase] - SSH passphrase + * @property {number} [port = 22] - SSH port + * @property {string} [privateKey] - private key for either key-based or hostbased user authentication + */ +/** + * @typedef Harness + * @type {Object} + * @property {Array} bigip - BIG-IP devices + * @property {Array} other - other devices + */ diff --git a/test/functional/shared/remoteHost/appLXConnector.js b/test/functional/shared/remoteHost/appLXConnector.js new file mode 100644 index 00000000..94323fd6 --- /dev/null +++ b/test/functional/shared/remoteHost/appLXConnector.js @@ -0,0 +1,273 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const hasIn = require('lodash/hasIn'); +const icrdk = require('icrdk'); // eslint-disable-line import/no-extraneous-dependencies + +/** + * @module test/functional/shared/remoteHost/appLXConnector + * + * @typedef {import("./icontrolConnector").IControlConnector} IControlConnector + * @typedef {import("../utils/logger").Logger} Logger + */ + +const F5_AUTH_HEADER = 'x-f5-auth-token'; + +/** + * LX Application Connector + * + * @property {Logger} logger - logger + * @property {IControlConnector} icontrol - iControl Connector + */ +class AppLXConnector { + /** + * Constructor + * + * @param {IControlConnector} icontrol - iControl Connector + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + */ + constructor(icontrol, options) { + Object.defineProperties(this, { + icontrol: { + value: icontrol + } + }); + + options = options || {}; + this.logger = (options.logger || this.icontrol.logger).getChild('appLX'); + } + + /** + * Install ILX package + * + * @param {string} file - local file (RPM) to install + * @param {boolean} [reuse] - re-use package that installed already + * + * @returns {Promise} resolved upon completion + */ + install(file, reuse) { + this.logger.info('Installing LX package', { file, reuse }); + return this.icontrol.echo() + .then(() => new Promise((resolve, reject) => { + icrdk.deployToBigIp({ + AUTH_TOKEN: this.icontrol.authToken, + HOST: this.icontrol.transport.host.host, + PORT: this.icontrol.transport.defaults().port + }, file, (err) => { + if (err) { + // resolve if error is because the package is already installed + // in that case error is of type 'string' - instead of in .message + if (reuse && /already installed/.test(err)) { + this.logger.info('Package installed already.', { file, reuse }); + resolve(); + } else { + reject(err); + } + } else { + this.logger.info('Package installed.', { file, reuse }); + resolve(); + } + }); + })); + } + + /** + * Get list of installed ILX packages + 
* + * @returns {Promise>} resolved upon completion + */ + list() { + this.logger.info('Listing LX packages'); + return this.icontrol.echo() + .then(() => new Promise((resolve, reject) => { + // icrdk bug - should pass headers and should send additional requests + const opts = { + headers: { + [F5_AUTH_HEADER]: this.icontrol.authToken + }, + HOST: this.icontrol.transport.host.host, + PORT: this.icontrol.transport.defaults().port + }; + const checkDataAndRetry = (data) => { + if (data.queryResponse) { + this.logger.info('List of installed LX applications', { packages: data.queryResponse }); + resolve(data.queryResponse); + } else if (data.selfLink) { + setTimeout(() => { + this.icontrol.makeRequestWithAuth({ + method: 'GET', + uri: data.selfLink.replace('https://localhost', '') + }) + .then(checkDataAndRetry); + }, 300); + } else { + reject(new Error(`Unable to fetch data. Unexpected response: ${JSON.stringify(data)}`)); + } + }; + + icrdk.queryInstalledPackages(opts, (err, queryResults) => { + if (err) { + reject(err); + } else { + checkDataAndRetry(queryResults); + } + }); + })); + } + + /** + * Uninstall ILX package + * + * @param {string} packageName - package to remove from device, should be RPM full name (not path) + * + * @returns {Promise} resolved upon completion + */ + uninstall(packageName) { + this.logger.info('Uninstalling LX package', { packageName }); + return this.icontrol.echo() + .then(() => new Promise((resolve, reject) => { + icrdk.uninstallPackage({ + AUTH_TOKEN: this.icontrol.authToken, + HOST: this.icontrol.transport.host.host, + PORT: this.icontrol.transport.defaults().port + }, packageName, (err) => { + if (err) { + reject(err); + } else { + this.logger.info('Package uninstalled', { packageName }); + resolve(); + } + }); + })); + } +} + +/** + * LX Application Connector Manager + * + * @property {Logger} logger - logger + * @property {IControlConnector} icontrol - iControl Connector + */ +class AppLXConnectorManager { + /** + * Constructor + 
* + * @param {AppLXConnectorManagerOptions} options - options + * @param {IControlConnector} options.icontrol - iControl Connector + * @param {Logger} [options.logger] - logger + */ + constructor(options) { + options = options || {}; + this.logger = (options.logger || options.icontrol.transport.host.logger); + + Object.defineProperties(this, { + icontrol: { + value: options.icontrol + } + }); + } + + /** + * Create new LX Application Connector instance + * + * @param {AppLXConnectorManagerOptions} [options] - options + * + * @returns {AppLXConnector} instance + */ + create(options) { + options = options || {}; + return new AppLXConnector( + options.icontrol || this.icontrol, + { + logger: this.logger + } + ); + } + + /** + * Create new LX Application Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {AppLXConnectorManagerOptions} [options] - options + * + * @returns {AppLXConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign AppLXConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Install ILX package + * + * @param {string} file - local file (RPM) to install + * @param {AppLXConnectorManagerOptions} [options] - options + * @param {boolean} [options.reuse] - re-use package that installed already + * + * @returns {Promise} resolved upon completion + */ + install(file, options) { + options = options || {}; + return this.create({ icontrol: options.icontrol }).install(file, options.reuse); + } + + /** + * Get list of installed ILX packages + * + * @param {AppLXConnectorManagerOptions} [options] - options + * + * @returns {Promise>} resolved upon completion + */ + list(options) { + options = options || {}; + return this.create({ icontrol: options.icontrol }).list(); + } + + 
/** + * Uninstall ILX package + * + * @param {string} packageName - package to remove from device, should be RPM full name (not path) + * @param {AppLXConnectorManagerOptions} [options] - options + * + * @returns {Promise} resolved upon completion + */ + uninstall(packageName, options) { + options = options || {}; + return this.create({ icontrol: options.icontrol }).uninstall(packageName); + } +} + +module.exports = { + AppLXConnector, + AppLXConnectorManager +}; + +/** + * @typedef LXPackageInfo + * @type {Object} + * @property {string} name - application name + * @property {string} version - version + * @property {string} release - release number + * @property {string} arch - architecture + * @property {string} packageName - RPM full name + * @property {Array} tags - tags + */ +/** + * @typedef AppLXConnectorManagerOptions + * @type {Object} + * @property {IControlConnector} [icontrol] - iControl Connector + */ diff --git a/test/functional/shared/remoteHost/f5BigDevice.js b/test/functional/shared/remoteHost/f5BigDevice.js new file mode 100644 index 00000000..db2984d1 --- /dev/null +++ b/test/functional/shared/remoteHost/f5BigDevice.js @@ -0,0 +1,113 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const AppLXConnectorManager = require('./appLXConnector').AppLXConnectorManager; +const IControlAPIManager = require('./icontrolAPI').IControlAPIManager; +const IControlConnectorManager = require('./icontrolConnector').IControlConnectorManager; +const RemoteDevice = require('./remoteDevice'); + +/** + * @module test/functional/shared/remoteHost/f5BigDevice + * + * @typedef {import("./appLXConnector").AppLXConnectorManagerOptions} AppLXConnectorManagerOptions + * @typedef {import("./icontrolAPI").IControlAPIManagerOptions} IControlAPIManagerOptions + * @typedef {import("./icontrolConnector").IControlConnectorManagerOptions} IControlConnectorManagerOptions + * @typedef {import("./icontrolConnector").IControlConnector} IControlConnector + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/request").RequestOptions} RequestOptions + */ + +/** + * F5 Device + * + * @property {AppLXConnectorManager} appLX - App LX Connector(s) manager + * @property {IControlAPIManager} icAPI - iControl API manager + * @property {IControlConnector} icontrol - iControl Connector(s) manager + * @property {Logger} logger - logger + */ +class F5BigDevice extends RemoteDevice { + /** + * Constructor + * + * @param {string} host - remote host + */ + constructor(host) { + super(host); + this.logger = this.host.logger.getChild('f5Device'); + } + + /** + * Initialize App LX Connector(s) manager + * + * @param {AppLXConnectorManagerOptions} options - options + * + * @returns {AppLXConnectorManager} instance + */ + initAppLXStack(options) { + Object.defineProperty( + this, 'appLX', { + value: new AppLXConnectorManager(Object.assign( + {}, + Object.assign({}, options || {}), + { + logger: this.logger + } + )) + } + ); + return this.appLX; + } + + /** + * Initialize iControl API manager + * + * @param {IControlAPIManagerOptions} options - options + * + * @returns {IControlAPIManager} instance + */ + initIControlAPIStack(options) { + 
Object.defineProperty( + this, 'icAPI', { + value: new IControlAPIManager(Object.assign( + {}, + Object.assign({}, options || {}), + { + logger: this.logger + } + )) + } + ); + return this.icAPI; + } + + /** + * Initialize iControl Connector(s) manager + * + * @param {IControlConnectorManagerOptions} options - options + * + * @returns {IControlConnectorManager} instance + */ + initIControlStack(options) { + Object.defineProperty( + this, 'icontrol', { + value: new IControlConnectorManager(Object.assign( + {}, + Object.assign({}, options || {}), + { + logger: this.logger + } + )) + } + ); + return this.icontrol; + } +} + +module.exports = F5BigDevice; diff --git a/test/functional/shared/remoteHost/httpConnector.js b/test/functional/shared/remoteHost/httpConnector.js new file mode 100644 index 00000000..7dd40182 --- /dev/null +++ b/test/functional/shared/remoteHost/httpConnector.js @@ -0,0 +1,274 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const cloneDeep = require('lodash/cloneDeep'); +const hasIn = require('lodash/hasIn'); +const hasKey = require('lodash/has'); + +const constants = require('../constants'); +const promiseUtil = require('../utils/promise'); +const request = require('../utils/request'); + +/** + * @module test/functional/shared/remoteHost/httpConnector + * + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + * @typedef {import("../utils/request").RequestOptions} RequestOptions + */ + +const DEFAULTS = Object.freeze({ + allowSelfSignedCert: !constants.HTTP_REQUEST.STRICT_SSL, + continueOnErrorCode: false, + expectedResponseCode: 200, + includeResponseObject: false, + json: false, + method: 'GET', + port: constants.HTTP_REQUEST.PORT, + protocol: constants.HTTP_REQUEST.PROTOCOL, + rawResponseBody: false +}); + +/** + * HTTP Connector to RemoteHost + * + * @property {boolean} allowSelfSignedCert - do not require SSL certificates be valid + * @property {boolean} continueOnErrorCode - continue on non-successful response code + * @property {Array|integer} expectedResponseCode - expected response code + * @property {boolean} gzip - accept compressed content from the server + * @property {Object} headers - HTTP headers + * @property {RemoteHost} host - remote host + * @property {boolean} includeResponseObject - return [body, responseObject] + * @property {boolean} json - sets HTTP body to JSON representation of value + * @property {Logger} logger - logger + * @property {string} method - HTTP method + * @property {integer} port - HTTP port + * @property {string} protocol - HTTP protocol + * @property {string | URL} proxy - proxy URI or proxy config + * @property {boolean} rawResponseBody - return response as Buffer object with binary data + * @property {PromiseRetryOptions} 
retryOptions - retry options + * @property {integer} timeout - milliseconds to wait for a socket timeout (option from 'request' library) + */ +class HTTPConnector { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {RequestOptions} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, options) { + // need to add cloneDeep in case when DEFAULTS contains complex objects + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + [ + 'allowSelfSignedCert', + 'continueOnErrorCode', + 'gzip', + 'includeResponseObject', + 'json', + 'method', + 'port', + 'protocol', + 'rawResponseBody', + 'timeout' + ].forEach((propName) => { + if (hasKey(options, propName)) { + Object.defineProperty(this, propName, { + value: options[propName] + }); + } + }); + // copy complex objects + [ + 'expectedResponseCode', + 'headers', + 'proxy' + ].forEach((propName) => { + if (hasKey(options, propName)) { + const originVal = cloneDeep(options[propName]); + Object.defineProperty(this, propName, { + get() { return cloneDeep(originVal); } + }); + } + }); + + const retryOptions = Object.assign({}, options.retry || {}); + Object.defineProperties(this, { + host: { + value: host + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + } + }); + this.logger = (options.logger || this.host.logger).getChild('http'); + } + + /** + * Get defaults for options + * + * @returns {RequestOptions} defaults + */ + defaults() { + const defaults = {}; + [ + 'allowSelfSignedCert', + 'continueOnErrorCode', + 'expectedResponseCode', + 'gzip', + 'headers', + 'includeResponseObject', + 'json', + 'logger', + 'method', + 'port', + 'protocol', + 'proxy', + 'rawResponseBody', + 'timeout' + ].forEach((propName) => { + if (hasKey(this, propName)) { + defaults[propName] = this[propName]; + } + }); + return defaults; + } + + /** + * Make HTTP request + * + * 
@param {RequestOptions} options - options
+     * @param {PromiseRetryOptions} [options.retry] - retry options
+     *
+     * @returns {Promise} resolved once response received and processed
+     */
+    makeRequest(options) {
+        options = Object.assign({}, options);
+        const retryOpts = assignDefaults(
+            Object.assign({}, options.retry || {}),
+            this.retryOptions
+        );
+        delete options.retry;
+
+        return promiseUtil.retry(() => new Promise((resolve, reject) => {
+            options = assignDefaults(options, this.defaults());
+            options.host = undefined;
+            options.fullURI = undefined;
+
+            request(this.host.host, options)
+                .then(resolve, reject);
+        }), retryOpts);
+    }
+}
+
+/**
+ * HTTP Connector Manager
+ *
+ * @property {RemoteHost} host - remote host
+ * @property {Logger} logger - logger
+ * @property {RequestOptions} options - HTTP request options
+ * @property {PromiseRetryOptions} retryOptions - retry options
+ */
+class HTTPConnectorManager {
+    /**
+     * Constructor
+     *
+     * @param {RemoteHost} host - remote host
+     * @param {RequestOptions} [options] - options
+     * @param {Logger} [options.logger] - logger
+     * @param {PromiseRetryOptions} [options.retry] - retry options
+     */
+    constructor(host, options) {
+        options = assignDefaults(
+            Object.assign({}, options || {}),
+            DEFAULTS
+        );
+        const retryOptions = Object.assign({}, options.retry || {});
+        delete options.retry;
+
+        // NOTE(review): 'this.host' is not assigned until the defineProperties call below,
+        // so the 'this.host.logger' fallback will throw when 'options.logger' is omitted -
+        // presumably 'host.logger' was intended; confirm and fix in a follow-up.
+        this.logger = (options.logger || this.host.logger);
+        delete options.logger;
+
+        options = cloneDeep(options);
+
+        Object.defineProperties(this, {
+            host: {
+                value: host
+            },
+            retryOptions: {
+                get() { return Object.assign({}, retryOptions); }
+            },
+            options: {
+                get() { return Object.assign({}, options); }
+            }
+        });
+    }
+
+    /**
+     * Create new HTTP Connector instance
+     *
+     * @param {RequestOptions} [options] - options
+     * @param {PromiseRetryOptions} [options.retry] - retry options
+     *
+     * @returns {HTTPConnector} instance
+     */
+    create(options) {
+        return new HTTPConnector(this.host, assignDefaults(
+            Object.assign({}, options || 
{}), + Object.assign(this.options, { + logger: this.logger, + retry: this.retryOptions + }) + )); + } + + /** + * Create new HTTP Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {RequestOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {HTTPConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign HTTPConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Make HTTP request + * + * @param {RequestOptions} options - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once response received and processed + */ + makeRequest(options) { + return this.create().makeRequest(options); + } +} + +module.exports = { + HTTPConnector, + HTTPConnectorManager +}; diff --git a/test/functional/shared/remoteHost/icontrolAPI.js b/test/functional/shared/remoteHost/icontrolAPI.js new file mode 100644 index 00000000..5600dcd9 --- /dev/null +++ b/test/functional/shared/remoteHost/icontrolAPI.js @@ -0,0 +1,168 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const hasIn = require('lodash/hasIn'); + +/** + * @module test/functional/shared/remoteHost/icontrolAPI + * + * @typedef {import("./icontrolConnector").IControlConnector} IControlConnector + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + */ + +/** + * iControl API + * + * @property {Logger} logger - logger + * @property {IControlConnector} icontrol - iControl Connector + */ +class IControlAPI { + /** + * Constructor + * + * @param {IControlConnector} icontrol - iControl Connector + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + */ + constructor(icontrol, options) { + Object.defineProperties(this, { + icontrol: { + value: icontrol + } + }); + + options = options || {}; + this.logger = (options.logger || this.icontrol.logger).getChild('icAPI'); + } + + /** + * Fetch BIG-IP software version + * + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved with software version + */ + getSoftwareVersion(retry) { + return this.icontrol.makeRequestWithAuth({ + method: 'GET', + retry, + uri: '/mgmt/tm/sys/clock' + }) + .then((data) => data.selfLink.split('ver=')[1]); + } + + /** + * Run shell command using BASH endpoint + * + * @param {string} cmd - command + * @param {PromiseRetryOptions} [retry] - re-try options + * + * @returns {Promise} resolved with response + */ + runBashCmd(cmd, retry) { + return this.icontrol.makeRequestWithAuth({ + body: { + command: 'run', + utilCmdArgs: `-c "${cmd}"` + }, + json: true, + method: 'POST', + retry, + uri: '/mgmt/tm/util/bash' + }); + } +} + +/** + * iControl API Manager + * + * @property {Logger} logger - logger + * @property {IControlConnector} icontrol - iControl Connector + */ +class IControlAPIManager { + /** + * Constructor + * + * @param {IControlAPIManagerOptions} options - options + * @property {IControlConnector} options.icontrol - 
iControl Connector + * @param {Logger} [options.logger] - logger + */ + constructor(options) { + options = options || {}; + this.logger = (options.logger || options.icontrol.transport.host.logger); + + Object.defineProperties(this, { + icontrol: { + value: options.icontrol + } + }); + } + + /** + * Create new iControl API instance + * + * @param {IControlAPIManagerOptions} [options] - options + * + * @returns {IControlAPI} instance + */ + create(options) { + options = options || {}; + return new IControlAPI( + options.icontrol || this.icontrol, + { + logger: this.logger + } + ); + } + + /** + * Create new iControl API instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {IControlAPIManagerOptions} [options] - options + * + * @returns {IControlAPI} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign IControlAPI to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Fetch BIG-IP software version + * + * @param {IControlAPIManagerOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - re-try options + * + * @returns {Promise} resolved with software version + */ + getSoftwareVersion(options) { + options = options || {}; + return this.create({ icontrol: options.icontrol }).getSoftwareVersion(options.retry); + } +} + +module.exports = { + IControlAPI, + IControlAPIManager +}; + +/** + * @typedef IControlAPIManagerOptions + * @type {Object} + * @property {IControlConnector} [icontrol] - iControl Connector + */ diff --git a/test/functional/shared/remoteHost/icontrolConnector.js b/test/functional/shared/remoteHost/icontrolConnector.js new file mode 100644 index 00000000..ae9c2787 --- /dev/null +++ b/test/functional/shared/remoteHost/icontrolConnector.js @@ -0,0 +1,326 @@ +/* + * Copyright 
2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const hasIn = require('lodash/hasIn'); + +/** + * @module test/functional/shared/remoteHost/icontrolConnector + * + * @typedef {import("./httpConnector").HTTPConnector} HTTPConnector + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + * @typedef {import("../utils/request").RequestOptions} RequestOptions + */ + +const F5_AUTH_HEADER = 'x-f5-auth-token'; +const IControlAuthRequiredError = { + statusCode: 401, + statusMessage: 'F5 Authorization Required' +}; + +const PRIVATES = new WeakMap(); + +/** + * iControl HTTP Connector + * + * @property {string} authToken - current auth token + * @property {Logger} logger - logger + * @property {HTTPConnector} transport - HTTP transport + * @property {string} username - username + */ +class IControlConnector { + /** + * Constructor + * + * @param {string} username - username + * @param {string} passphrase - passphrase + * @param {HTTPConnector} transport - transport + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + */ + constructor(username, passphrase, transport, options) { + Object.defineProperties(this, { + authToken: { + get() { return PRIVATES.get(this).authToken; } + }, + transport: { + value: transport + }, + username: { + value: username + } + }); + + options = options || {}; + this.logger = (options.logger || this.transport.logger).getChild(`iControl@${username}`); + + PRIVATES.set(this, { + authToken: null, 
+ passphrase + }); + } + + /** + * Send simple 'echo' request to acquire auth token + * + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once completed + */ + echo(retry) { + return this.makeRequestWithAuth({ + method: 'GET', + retry, + uri: '/mgmt/shared/echo' + }); + } + + /** + * Request auth token + * + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved with auth token + */ + getAuthToken(retry) { + // remove old auth token + const headers = this.transport.headers || {}; + delete headers[F5_AUTH_HEADER]; + + return this.makeRequest({ + body: { + username: this.username, + password: PRIVATES.get(this).passphrase, + loginProviderName: 'tmos' + }, + continueOnErrorCode: false, + expectedResponseCode: 200, + headers, + includeResponseObject: false, + json: true, + method: 'POST', + rawResponseBody: false, + retry, + uri: '/mgmt/shared/authn/login' + }) + .then((data) => data.token.token); + } + + /** + * Make request to iControl + * + * Note: this method doesn't handle HTTP 401 + * + * @param {RequestOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved with response according to options + */ + makeRequest(options) { + return this.transport.makeRequest(options || {}); + } + + /** + * Make request using auth data + * + * Note: this method handles HTTP 401 + * + * @param {RequestOptions} [options] - request options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved with response according to options + */ + makeRequestWithAuth(options) { + options = options || {}; + options.headers = options.headers || this.transport.headers || {}; + + const originOptions = Object.assign({}, options); + const originIncludeResponseObject = options.includeResponseObject; + + options.includeResponseObject = true; + if (this.authToken) { + options.headers[F5_AUTH_HEADER] = 
this.authToken; + } + + return this.makeRequest(options) + .then( + (response) => ({ response }), + (error) => ({ error }) + ) + .then((ret) => { + if (ret.error) { + if (ret.error.statusCode !== IControlAuthRequiredError.statusCode) { + return Promise.reject(ret.error); + } + } else if (ret.response[1].statusCode !== IControlAuthRequiredError.statusCode) { + return Promise.resolve(originIncludeResponseObject + ? ret.response + : ret.response[0]); + } + this.logger.info('Need to request/renew auth token'); + // time to request/renew auth token + return this.getAuthToken() + .then((token) => { + this.logger.info('Got new auth token!'); + PRIVATES.get(this).authToken = token; + originOptions.headers[F5_AUTH_HEADER] = token; + return this.makeRequest(originOptions); + }); + }); + } +} + +/** + * iControl Connector Manager + * + * @property {Logger} logger - logger + * @property {HTTPConnector} transport - HTTP transport + * @property {string} username + */ +class IControlConnectorManager { + /** + * Constructor + * + * @param {IControlConnectorManagerOptions} options - options + * @param {HTTPConnector} options.transport - options + * @param {Logger} [options.logger] - logger + */ + constructor(options) { + options = options || {}; + this.logger = (options.logger || options.transport.host.logger); + + Object.defineProperties(this, { + transport: { + value: options.transport + }, + username: { + value: options.username + } + }); + + PRIVATES.set(this, { + passphrase: options.passphrase + }); + } + + /** + * Create new iControl Connector instance + * + * @param {IControlConnectorManagerOptions} [options] - options + * + * @returns {IControlConnector} instance + */ + create(options) { + options = assignDefaults( + Object.assign({}, options || {}), + { + passphrase: PRIVATES.get(this).passphrase, + transport: this.transport, + username: this.username + } + ); + return new IControlConnector( + options.username, + options.passphrase, + options.transport, + { + logger: 
this.logger + } + ); + } + + /** + * Create new iControl Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {IControlConnectorManagerOptions} [options] - options + * + * @returns {IControlConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign IControlConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Send simple 'echo' request to check auth + * + * @param {IControlConnectorManagerOptions} icOpts - options + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once completed + */ + echo(icOpts, retry) { + return this.create(icOpts).echo(retry); + } + + /** + * Request auth token + * + * @param {IControlConnectorManagerOptions} icOpts - options + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once completed + */ + getAuthToken(icOpts, retry) { + return this.create(icOpts).getAuthToken(retry); + } + + /** + * Make request to iControl + * + * Note: this method doesn't handle HTTP 401 + * + * @param {IControlConnectorManagerOptions} icOpts - options + * @param {RequestOptions} [reqOpts] - options + * @param {PromiseRetryOptions} [reqOpts.retry] - retry options + * + * @returns {Promise} resolved with response according to options + */ + makeRequest(icOpts, reqOpts) { + return this.create(icOpts).makeRequest(reqOpts); + } + + /** + * Make request using auth data + * + * Note: this method handles HTTP 401 + * + * @param {IControlConnectorManagerOptions} icOpts - options + * @param {RequestOptions} [reqOpts] - options + * @param {PromiseRetryOptions} [reqOpts.retry] - retry options + * + * @returns {Promise} resolved with response according to options + */ + makeRequestWithAuth(icOpts, reqOpts) { + 
return this.create(icOpts).makeRequestWithAuth(reqOpts); + } +} + +module.exports = { + IControlConnector, + IControlConnectorManager +}; + +/** + * @typedef IControlConnectorManagerOptions + * @type {Object} + * @property {string} [passphrase] - passphrase + * @property {HTTPConnector} [transport] - transport + * @property {string} [username] - username + */ diff --git a/test/functional/shared/remoteHost/index.js b/test/functional/shared/remoteHost/index.js new file mode 100644 index 00000000..10f7155f --- /dev/null +++ b/test/functional/shared/remoteHost/index.js @@ -0,0 +1,17 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const F5BigDevice = require('./f5BigDevice'); +const RemoteDevice = require('./remoteDevice'); + +module.exports = { + F5BigDevice, + RemoteDevice +}; diff --git a/test/functional/shared/remoteHost/remoteDevice.js b/test/functional/shared/remoteHost/remoteDevice.js new file mode 100644 index 00000000..36af78f3 --- /dev/null +++ b/test/functional/shared/remoteHost/remoteDevice.js @@ -0,0 +1,152 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const HTTPConnectorManager = require('./httpConnector').HTTPConnectorManager; +const RemoteHost = require('./remoteHost'); +const SSHConnectorManager = require('./sshConnector').SSHConnectorManager; +const TCPConnectorManager = require('./tcpConnector').TCPConnectorManager; +const UDPConnectorManager = require('./udpConnector').UDPConnectorManager; + +/** + * @module test/functional/shared/remoteHost/remoteDevice + * + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + * @typedef {import("../utils/request").RequestOptions} RequestOptions + * @typedef {import("./sshConnector").SSHClientOptions} SSHClientOptions + * @typedef {import("./tcpConnector").TCPConnectorOptions} TCPConnectorOptions + */ + +/** + * Remote Device + * + * @property {RemoteHost} host - remote host + * @property {HTTPConnectorManager} http - HTTP Connector(s) manager + * @property {Logger} logger - logger + * @property {SSHConnectorManager} ssh - SSH connection(s) manager + * @property {TCPConnectorManager} tcp - TCP connection(s) manager + * @property {UDPConnectorManager} udp - UDP connection(s) manager + */ +class RemoteDevice { + /** + * Constructor + * + * @param {string} host - remote host + */ + constructor(host) { + Object.defineProperty(this, 'host', { + value: new RemoteHost(host) + }); + this.logger = this.host.logger.getChild('remoteDevice'); + } + + /** + * Initialize HTTP Connector(s) manager + * + * @param {RequestOptions} [options] - request options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {HTTPConnectorManager} instance + */ + initHTTPStack(options) { + Object.defineProperty( + this, 'http', { + value: new HTTPConnectorManager( + this.host, + Object.assign( + {}, + Object.assign({}, options || {}), + { + logger: this.logger + } + ) + ) + } + ); + return this.http; + } + + 
/**
+     * Initialize SSH Connector(s) manager
+     *
+     * @param {SSHClientOptions} [options] - SSH options
+     * @param {PromiseRetryOptions} [options.retry] - retry options
+     *
+     * @returns {SSHConnectorManager} instance
+     */
+    initSSHStack(options) {
+        Object.defineProperty(
+            this, 'ssh', {
+                value: new SSHConnectorManager(
+                    this.host,
+                    Object.assign(
+                        {},
+                        Object.assign({}, options || {}),
+                        {
+                            logger: this.logger
+                        }
+                    )
+                )
+            }
+        );
+        return this.ssh;
+    }
+
+    /**
+     * Initialize TCP Connector(s) manager
+     *
+     * @param {TCPConnectorOptions} [options] - options
+     * @param {PromiseRetryOptions} [options.retry] - retry options
+     *
+     * @returns {TCPConnectorManager} instance
+     */
+    initTCPStack(options) {
+        Object.defineProperty(
+            this, 'tcp', {
+                value: new TCPConnectorManager(
+                    this.host,
+                    Object.assign(
+                        {},
+                        Object.assign({}, options || {}),
+                        {
+                            logger: this.logger
+                        }
+                    )
+                )
+            }
+        );
+        return this.tcp;
+    }
+
+    /**
+     * Initialize UDP Connector(s) manager
+     *
+     * @param {PromiseRetryOptions} [options] - retry options
+     *
+     * @returns {UDPConnectorManager} instance
+     */
+    initUDPStack(options) {
+        Object.defineProperty(
+            this, 'udp', {
+                value: new UDPConnectorManager(
+                    this.host,
+                    {
+                        logger: this.logger,
+                        retry: Object.assign({}, options || {})
+                    }
+                )
+            }
+        );
+        return this.udp;
+    }
+}
+
+module.exports = RemoteDevice;
diff --git a/test/functional/shared/remoteHost/remoteHost.js b/test/functional/shared/remoteHost/remoteHost.js
new file mode 100644
index 00000000..4c5c2de4
--- /dev/null
+++ b/test/functional/shared/remoteHost/remoteHost.js
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for
+ * license terms. Notwithstanding anything to the contrary in the EULA, Licensee
+ * may copy and modify this software product for its internal business purposes.
+ * Further, Licensee may upload, publish and distribute the modified version of
+ * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const mainLogger = require('../utils/logger').getChild('RH'); + +/** + * @module test/functional/shared/remoteHost/remoteHost + * + * @typedef {import("../utils/logger").Logger} Logger + */ + +/** + * Remote Host + * + * @property {string} host - remote host + * @property {Logger} logger - logger + */ +class RemoteHost { + /** + * Constructor + * + * @param {string} host - remote host/address + */ + constructor(host) { + Object.defineProperties(this, { + host: { + value: host + } + }); + this.logger = mainLogger.getChild(`[${this.host}]`); + } +} + +module.exports = RemoteHost; diff --git a/test/functional/shared/remoteHost/sshConnector.js b/test/functional/shared/remoteHost/sshConnector.js new file mode 100644 index 00000000..21188bf1 --- /dev/null +++ b/test/functional/shared/remoteHost/sshConnector.js @@ -0,0 +1,851 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const hasIn = require('lodash/hasIn'); +const SSHClient = require('ssh2').Client; + +const promiseUtil = require('../utils/promise'); + +/** + * @module test/functional/shared/remoteHost/sshConnector + * + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + */ + +const DEFAULTS = Object.freeze({ + encoding: 'utf8', + port: 22, + retry: Object.freeze({ + delay: 100, + maxTries: 10 + }), + tryKeyboard: true +}); + +const PRIVATES = new WeakMap(); + +let SSH_REQ_ID = 0; + +/** + * Check SSH connection + * + * @this SSHConnector + * + * @returns {Promise} resolved once connection is ready + */ +function checkConnection(reqId) { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (this.terminated) { + this.logger.error('Attempt to execute command using terminated SSH Connector!', { reqId }); + // eslint-disable-next-line no-promise-executor-return + reject(new Error('SSHConnector was terminated')); + } else { + if (privates.client === null) { + initConnection.call(this, privates, reqId); + } + privates.readyPromise.then(resolve, reject); + } + }); +} + +/** + * Check SFTP connection + * + * @this SSHConnector + * + * @returns {Promise} resolved once connection is ready + */ +function checkSftp(reqId) { + return checkConnection.call(this, reqId) + .then(() => { + const privates = PRIVATES.get(this); + if (privates.sftp === null) { + initSftp.call(this, privates, reqId); + } + return privates.sftpReadyPromise; + }); +} + +/** + * Cleanup + * + * @this SSHConnector + * @param {SSHClient} currentClient + */ +function cleanup(currentClient) { + const privates = PRIVATES.get(this); + if (privates.client === currentClient) { + privates.client = null; + privates.closePromise = null; + privates.readyPromise = null; + 
privates.sftp = null; + privates.sftpReadyPromise = null; + } +} + +/** + * Exec SFTP command + * + * @param {string} cmd - command + * @param {Array} cmdArgs - command arguments + * @param {PromiseRetryOptions} [retryOpts] - retry options + * + * @returns {Promise} resolved once command executed + */ +function execSftpCommand(cmd, cmdArgs, retryOpts) { + retryOpts = assignDefaults(Object.assign({}, retryOpts || {}), this.retryOptions); + + const reqId = nextReqId(); + const self = this; + this.logger.info('Going to exec SFTP command', { + cmd, + cmdArgs, + reqId + }); + + const originCb = retryOpts.callback; + retryOpts.callback = (error) => { + if (this.terminated) { + return false; + } + this.logger.error('Error on attempt to execute command... Going to retry if re-try attempts left', error, { + reqId, + tries: retryOpts.tries + }); + return originCb ? originCb(error) : true; + }; + + return checkSftp.call(this, reqId) + .then(() => new Promise((sftpResolve, sftpReject) => { + const privates = PRIVATES.get(this); + if (!privates.sftp) { + sftpReject(new Error('No SFTP client to use to execute command!')); + } else { + const isOk = privates.sftp[cmd].apply(privates.sftp, cmdArgs.slice(0).concat([function cb() { + if (arguments[0] instanceof Error) { + sftpReject(arguments[0]); + } else { + self.logger.info('SFTP command response', { + reqId, + cbArgs: arguments + }); + + let ret = Array.from(arguments); + if (ret[0] === null) { + // probably 'err' arg + ret = ret.slice(1); + } + if (ret.length === 1) { + ret = ret[0]; + } else if (ret.length === 0) { + ret = undefined; + } + + sftpResolve(ret); + } + }])); + if (isOk === false) { + sftpReject(new Error('SSHClientError: should wait for "continue" event before proceeding with next command. 
Try later')); + } + } + })); +} + +/** + * Initialize SSH connection + * + * @this SSHConnector + */ +function initConnection(privates, reqId) { + const sshOptions = Object.assign({ + host: this.host.host + }, this.sshOptions); + + this.logger.info('Creating new client', { reqId, sshOptions }); + + const client = new SSHClient(); + client.connect(sshOptions); + + privates.client = client; + + client.on('end', () => { + this.logger.info('Client received FIN packet'); + client.end(); + }); + // The 'close' event will be called directly following 'error'. + client.on('error', (error) => this.logger.error('Error caught', error)); + client.on('timeout', () => { + this.logger.error('Timeout. Destroying client'); + client.destroy(); + }); + + client.on('keyboard-interactive', (name, instructions, instructionsLang, prompts, finish) => { + this.logger.info('Keyboard interactive auth'); + finish([sshOptions.password]); + }); + + privates.readyPromise = new Promise((readyResolve, readyReject) => { + const onClose = () => { + this.logger.error('Unable to establish connection...', { reqId }); + // eslint-disable-next-line no-use-before-define + client.removeListener('ready', onReady); + cleanup.call(this, client); + readyReject(new Error('Unable to establish connection...')); + }; + const onReady = () => { + client.removeListener('close', onClose); + this.logger.info('Connection established!', { reqId }); + + privates.closePromise = new Promise((closeResolve) => { + client.once('close', () => { + this.logger.info('Connection closed!'); + cleanup.call(this, client); + closeResolve(); + }); + }); + readyResolve(); + }; + client.once('close', onClose); + client.once('ready', onReady); + }); +} + +/** + * Init SFTP connection + * + * @this SSHConnector + */ +function initSftp(privates, reqId) { + this.logger.info('Creating new SFTP client', { reqId }); + privates.sftpReadyPromise = new Promise((readyResolve, readyReject) => { + privates.client.sftp((err, sftp) => { + if (err) { + 
readyReject(err); + } else { + privates.sftp = sftp; + readyResolve(); + } + }); + }); +} + +/** + * Get next request ID + * + * @returns {number} request ID + */ +function nextReqId() { + const reqId = SSH_REQ_ID; + SSH_REQ_ID += 1; + return reqId; +} + +/** + * Terminate connection once operation completed + * + * @param {SSHConnector} conn + * @param {Promise} promise + * + * @returns {Promise} + */ +function terminateOnceDone(conn, promise) { + let err; + let ret; + return promise.then((_ret) => { + ret = _ret; + }) + .catch((execErr) => { + err = execErr; + }) + .then(() => conn.terminate()) + .then(() => (err ? Promise.reject(err) : Promise.resolve(ret))); +} + +/** + * SSH Connector + * + * @property {string} encoding - stderr and stdout encoding + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {PromiseRetryOptions} retryOptions - retry options + * @property {SSHClientOptions} sshOptions - SSH options + * @property {boolean} terminated - 'true' if connector permanently terminated + */ +class SSHConnector { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {SSHClientOptions} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, options) { + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + const encoding = options.encoding; + delete options.encoding; + + const retryOptions = Object.assign({}, options.retry || {}); + delete options.retry; + + Object.defineProperties(this, { + encoding: { + value: encoding + }, + host: { + value: host + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + }, + sshOptions: { + get() { return Object.assign({}, options); } + }, + terminated: { + get() { return PRIVATES.get(this).terminated; } + } + }); + PRIVATES.set(this, { + client: null, + closeConnectorPromise: null, + closePromise: null, + 
readyPromise: null, + sftp: null, + terminated: false, + terminatePromise: null + }); + + this.logger = (options.logger || this.host.logger).getChild(`ssh:${this.sshOptions.port}`); + delete options.logger; + } + + /** + * Close current connection. + * + * Note: even if closed it still can be active if there are a lot of concurrent + * attempts to write data (new client will be created) + * + * @returns {Promise} resolved once closed + * @rejects {Error} when no client to close + */ + close() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + if (!this.closeConnectorPromise) { + this.logger.info('Closing connector...'); + const closeConnectorPromise = Promise.resolve() + .then(() => { + if (privates.client) { + privates.client._sock.unref(); + privates.client.destroy(); + return privates.closePromise.then(() => { + if (privates.closeConnectorPromise === closeConnectorPromise) { + privates.closeConnectorPromise = null; + } + }); + } + this.logger.info('No client to close!'); + return Promise.resolve(); + }); + privates.closeConnectorPromise = closeConnectorPromise; + } + privates.closeConnectorPromise.then(resolve, reject); + } else { + privates.terminatePromise.then(resolve, reject); + } + }); + } + + /** + * Copy local file to remote + * + * @param {string} localPath - local path to file + * @param {string} remotePath - remote path to file + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once file copied + */ + copyFileToRemote(localPath, remotePath, retryOpts) { + return execSftpCommand.call(this, 'fastPut', [localPath, remotePath], retryOpts); + } + + /** + * Exec command + * + * @param {string} command - exec command on remote host + * @param {Object} [commandOptions] - command options, see ssh2 docs + * @param {string} [commandOptions.encoding] - encoding for stdout and stderr + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns 
{Promise} resolved with response once command executed + */ + exec(command, commandOptions, retryOpts) { + commandOptions = Object.assign({}, commandOptions || {}); + retryOpts = assignDefaults(Object.assign({}, retryOpts || {}), this.retryOptions); + + const encoding = commandOptions.encoding || this.encoding; + delete commandOptions.encoding; + + const reqId = nextReqId(); + + this.logger.info('Executing command (+metadata info for debug):', { + command, + reqId, + retryOpts + }); + + const originCb = retryOpts.callback; + retryOpts.callback = (error) => { + if (this.terminated) { + return false; + } + this.logger.error('Error on attempt to execute command... Going to retry if re-try attempts left', error, { + reqId, + tries: retryOpts.tries + }); + return originCb ? originCb(error) : true; + }; + + // eslint-disable-next-line consistent-return + return promiseUtil.retry(() => checkConnection.call(this, reqId) + .then(() => new Promise((execResolve, execReject) => { + const privates = PRIVATES.get(this); + if (!privates.client) { + execReject(new Error('No client to use to execute command!')); + } else { + const client = privates.client; + const isOk = client.exec(command, commandOptions || {}, (err, stream) => { + if (err) { + execReject(err); + } else { + const ret = { + code: null, + command, + signal: '', + stderr: '', + stdout: '' + }; + + stream.stderr.on('data', (data) => { + ret.stderr += data.toString(encoding); + }); + stream.on('data', (data) => { + ret.stdout += data.toString(encoding); + }); + stream.on('close', (code, signal) => { + this.logger.info('Command executed:', { + reqId, + ret + }); + ret.code = code; + ret.signal = signal; + execResolve(ret); + }); + } + }); + if (isOk === false) { + execReject(new Error('SSHClientError: should wait for "continue" event before proceeding with next command. 
Try later')); + } + } + })), + retryOpts); + } + + /** + * Check if remote path exists + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved with 'true' if remote path exists + */ + exists(path, retryOpts) { + return execSftpCommand.call(this, 'exists', [path], retryOpts); + } + + /** + * Create remote directory + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote directory created + */ + mkdir(path, retryOpts) { + return execSftpCommand.call(this, 'mkdir', [path], retryOpts); + } + + /** + * Create remote directory if not exists + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote directory created + */ + mkdirIfNotExists(path, retryOpts) { + return execSftpCommand.call(this, 'exists', [path], retryOpts) + .then((exists) => (exists + ? Promise.resolve() + : execSftpCommand.call(this, 'mkdir', [path], retryOpts))); + } + + /** + * Remote remote directory + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote directory removed + */ + rmdir(path, retryOpts) { + return execSftpCommand.call(this, 'rmdir', [path], retryOpts); + } + + /** + * Remote remote directory if exists + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote directory removed + */ + rmdirIfExists(path, retryOpts) { + return execSftpCommand.call(this, 'exists', [path], retryOpts) + .then((exists) => (exists + ? 
execSftpCommand.call(this, 'rmdir', [path], retryOpts) + : Promise.resolve())); + } + + /** + * Terminate connector permanently + * + * @returns {Promise} resolved once terminated + * @rejects {Error} when no client to close + */ + terminate() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + this.logger.info('Terminating connector...'); + privates.terminated = true; + privates.terminatePromise = Promise.resolve() + .then(() => { + if (privates.client) { + privates.client._sock.unref(); + privates.client.destroy(); + return privates.closePromise.then(() => { + this.logger.info('Terminated!'); + }); + } + this.logger.info('No client to terminate'); + return Promise.resolve(); + }); + } + privates.terminatePromise.then(resolve, reject); + }); + } + + /** + * Unlink remote path + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote path unlinked + */ + unlink(path, retryOpts) { + return execSftpCommand.call(this, 'unlink', [path], retryOpts); + } + + /** + * Unlink remote path if exists + * + * @param {string} path - remote path + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once remote path unlinked + */ + unlinkIfExists(path, retryOpts) { + return execSftpCommand.call(this, 'exists', [path], retryOpts) + .then((exists) => (exists + ? 
execSftpCommand.call(this, 'unlink', [path], retryOpts) + : Promise.resolve())); + } + + /** + * Write data to remote file + * + * @param {string} path - remote path + * @param {Buffer | string} data - data to write + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once data written to remove file + */ + writeToFile(path, data, retryOpts) { + return execSftpCommand.call(this, 'writeFile', [path, data], retryOpts); + } +} + +/** + * SSH Connector Manager + * + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {PromiseRetryOptions} retryOptions - retry options + * @property {SSHClientOptions} sshOptions - SSH options + */ +class SSHConnectorManager { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {SSHClientOptions} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, options) { + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + const retryOptions = Object.assign({}, options.retry || {}); + delete options.retry; + + this.logger = (options.logger || this.host.logger); + delete options.logger; + + Object.defineProperties(this, { + host: { + value: host + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + }, + sshOptions: { + get() { return Object.assign({}, options); } + } + }); + } + + /** + * Copy local file to remote + * + * @param {string} localPath - local path to file + * @param {string} remotePath - remote path to file + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once file copied + */ + copyFileToRemote(localPath, remotePath, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.copyFileToRemote(localPath, remotePath)); + } + + /** + * Create new SSH 
Connector instance + * + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {SSHConnector} instance + */ + create(options) { + return new SSHConnector(this.host, assignDefaults( + Object.assign({}, options || {}), + Object.assign(this.sshOptions, { + logger: this.logger, + retry: this.retryOptions + }) + )); + } + + /** + * Create new SSH Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {SSHConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign SSHConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Exec command + * + * @param {string} command - exec command on remote host + * @param {Object} [commandOptions] - command options, see ssh2 docs + * @param {string} [commandOptions.encoding] - encoding for stdout and stderr + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved with response once command executed + */ + exec(command, commandOptions, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.exec(command, commandOptions)); + } + + /** + * Check if remote path exists + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved with 'true' if remote path exists + */ + exists(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.exists(path)); + } + + /** + * Create remote 
directory + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote directory created + */ + mkdir(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.mkdir(path)); + } + + /** + * Create remote directory if not exists + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote directory created + */ + mkdirIfNotExists(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.mkdirIfNotExists(path)); + } + + /** + * Remote remote directory + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote directory removed + */ + rmdir(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.rmdir(path)); + } + + /** + * Remote remote directory if exists + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote directory removed + */ + rmdirIfExists(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.rmdirIfExists(path)); + } + + /** + * Unlink remote path + * + * @param {string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote path unlinked + */ + unlink(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.unlink(path)); + } + + /** + * Unlink remote path if exists + * + * @param 
{string} path - remote path + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once remote path unlinked + */ + unlinkIfExists(path, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.unlinkIfExists(path)); + } + + /** + * Write data to remote file + * + * @param {string} path - remote path + * @param {Buffer | string} data - data to write + * @param {SSHClientOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once data written to remove file + */ + writeToFile(path, data, options) { + const conn = this.create(options); + return terminateOnceDone(conn, conn.writeToFile(path, data)); + } +} + +module.exports = { + SSHConnector, + SSHConnectorManager +}; + +/** + * @typedef SSHExecResponse + * @type {Object} + * @property {number | null} code - return code + * @property {string} command - command + * @property {string | null} signal - signal interrupted a process + * @property {string} stderr - stderr + * @property {string} stdout - stdout + */ +/** + * @typedef SSHClientOptions + * @type {Object} + * @property {string} [encoding = 'utf8'] - stderr and stdout encoding + * @property {string} [password] - password for password-based user authentication + * @property {integer} [port = 22] - port + * @property {Buffer | string} [privateKey] - private key for either key-based or hostbased user authentication + * @property {string} [username] - username for authentication + */ diff --git a/test/functional/shared/remoteHost/tcpConnector.js b/test/functional/shared/remoteHost/tcpConnector.js new file mode 100644 index 00000000..f9b01d8a --- /dev/null +++ b/test/functional/shared/remoteHost/tcpConnector.js @@ -0,0 +1,522 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. 
Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const hasIn = require('lodash/hasIn'); +const net = require('net'); + +const getArgByType = require('../utils/misc').getArgByType; +const promiseUtil = require('../utils/promise'); + +/** + * @module test/functional/shared/remoteHost/tcpConnector + * + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + */ + +const DEFAULTS = Object.freeze({ + allowHalfOpen: false, + family: '4', + retry: Object.freeze({ + delay: 100, + maxTries: 10 + }), + unref: false +}); + +const PRIVATES = new WeakMap(); + +let TCP_REQ_ID = 0; + +/** + * Get next request ID + * + * @returns {number} request ID + */ +function nextReqId() { + const reqId = TCP_REQ_ID; + TCP_REQ_ID += 1; + return reqId; +} + +/** + * TCP Connector + * + * @property {Array} data - received data + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {integer} port - remote port + * @property {PromiseRetryOptions} retryOptions - retry options + * @property {TCPConnectorOptions} socketOptions - socket options + * @property {boolean} terminated - 'true' if connector permanently terminated + */ +class TCPConnector { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {integer} port - port + * @param {TCPConnectorOptions} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, port, options) { + options = assignDefaults( + Object.assign({}, options || {}), + 
DEFAULTS + ); + const retryOptions = Object.assign({}, options.retry || {}); + + Object.defineProperties(this, { + data: { + get() { return PRIVATES.get(this).data.slice(0); } + }, + host: { + value: host + }, + port: { + value: port + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + }, + socketOptions: { + get() { return Object.assign({}, options); } + }, + terminated: { + get() { return PRIVATES.get(this).terminated; } + } + }); + PRIVATES.set(this, { + closeConnectorPromise: null, + closePromise: null, + data: [], + parentLogger: options.logger || this.host.logger, + readyPromise: null, + socket: null, + terminated: false, + terminatePromise: null + }); + this.logger = PRIVATES.get(this).parentLogger.getChild(`tcp:${port}`); + + delete options.logger; + delete options.retry; + } + + /** + * Close current connection. + * + * Note: even if closed it still can be active if there are a lot of concurrent + * attempts to write data (new socket will be created) + * + * @returns {Promise} resolved once closed + * @rejects {Error} when no socket to close + */ + close() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + if (!this.closeConnectorPromise) { + this.logger.info('Closing connector...'); + const closeConnectorPromise = Promise.resolve() + .then(() => { + if (privates.socket) { + privates.socket.destroy(); + return privates.closePromise.then(() => { + if (privates.closeConnectorPromise === closeConnectorPromise) { + privates.closeConnectorPromise = null; + } + }); + } + this.logger.info('No socket to close!'); + return Promise.resolve(); + }); + privates.closeConnectorPromise = closeConnectorPromise; + } + privates.closeConnectorPromise.then(resolve, reject); + } else { + privates.terminatePromise.then(resolve, reject); + } + }); + } + + /** + * Erase data received from remote host + */ + eraseData() { + PRIVATES.get(this).data = []; + } + + /** + * Ping TCP port on remote 
host + * + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved when remote host is listening on the port + */ + ping(retry) { + retry = assignDefaults(Object.assign({}, retry || {}), this.retryOptions); + const reqId = nextReqId(); + this.logger.info('Ping remote port (+metadata info for debug):', { + reqId, + retry + }); + + return promiseUtil.retry(() => new Promise((resolve, reject) => { + const socketOptions = this.socketOptions; + const connOptions = { + family: socketOptions.family, + host: this.host.host, + port: this.port + }; + const socket = new net.Socket({ + allowHalfOpen: socketOptions.allowHalfOpen + }); + socket.on('connect', () => { + socket.end(); + }); + socket.on('end', () => { + this.logger.info('Port is opened!', { reqId }); + resolve(); + }); + socket.on('error', (err) => { + this.logger.info('Port is closed!', { reqId, err }); + reject(err); + }); + socket.connect(connOptions); + }), retry); + } + + /** + * Send data + * + * Note: non-guarantee delivery + * + * @param {any} data - data to send + * @param {string} [encoding = 'utf8'] - data encoding + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once data was sent + */ + send(data, encoding, retry) { + encoding = 'utf8'; + retry = {}; + if (arguments.length > 1) { + encoding = getArgByType(arguments, 'string', { fromIndex: 1, defaultValue: encoding }).value; + retry = getArgByType(arguments, 'object', { fromIndex: 1, defaultValue: retry }).value; + } + + retry = assignDefaults(Object.assign({}, retry || {}), this.retryOptions); + + const reqId = nextReqId(); + this.logger.info('Sending data (+metadata info for debug):', { + data, + encoding, + reqId, + retry + }); + + const originCb = retry.callback; + retry.callback = (error) => { + if (this.terminated) { + return false; + } + this.logger.error('Error on attempt to send data... 
Going to retry if re-try attempts left', error, { + reqId, + tries: retry.tries + }); + return originCb ? originCb(error) : true; + }; + + // eslint-disable-next-line consistent-return + return promiseUtil.retry(() => new Promise((resolve, reject) => { + if (this.terminated) { + this.logger.error('Attempt to write data using terminated TCP Connector!', { reqId }); + // eslint-disable-next-line no-promise-executor-return + return reject(new Error('TCPConnector was terminated')); + } + const privates = PRIVATES.get(this); + if (privates.socket === null) { + const socketOptions = this.socketOptions; + const connOptions = { + family: socketOptions.family, + host: this.host.host, + port: this.port + }; + + this.logger.info('Creating new socket', { reqId, connOptions, socketOptions }); + + const socket = new net.Socket({ + allowHalfOpen: socketOptions.allowHalfOpen + }); + privates.socket = socket; + + if (socketOptions.encoding) { + socket.setEncoding(socketOptions.encoding); + } + if (typeof socketOptions.timeout !== 'undefined') { + socket.setTimeout(socketOptions.timeout); + } + if (typeof socketOptions.noDelay === 'boolean') { + socket.noDelay(socketOptions.noDelay); + } + if (socketOptions.unref) { + socket.unref(); + } + + socket.on('data', (msg) => privates.data.push(msg)); + socket.on('end', () => { + this.logger.info('Socket received FIN packet'); + socket.end(); + }); + // The 'close' event will be called directly following 'error'. + socket.on('error', (error) => this.logger.error('Error caught', error)); + socket.on('timeout', () => { + this.logger.error('Timeout. 
Destroying socket'); + socket.destroy(); + }); + + const cleanup = () => { + if (privates.socket === socket) { + privates.closePromise = null; + privates.readyPromise = null; + privates.socket = null; + } + }; + + this.eraseData(); + privates.readyPromise = new Promise((readyResolve, readyReject) => { + const onClose = () => { + this.logger.error('Unable to establish connection...', { reqId }); + // eslint-disable-next-line no-use-before-define + socket.removeListener('connect', onConnect); + cleanup(); + readyReject(new Error('Unable to establish connection...')); + }; + const onConnect = () => { + socket.removeListener('close', onClose); + this.logger = PRIVATES.get(this).parentLogger.getChild(`tcp:[${socket.localAddress}]:${socket.localPort}:${this.port}`); + this.logger.info('Connection established!', { reqId }); + + privates.closePromise = new Promise((closeResolve) => { + socket.once('close', () => { + this.logger.info('Connection closed!'); + cleanup(); + closeResolve(); + }); + }); + readyResolve(); + }; + socket.once('close', onClose); + socket.once('connect', onConnect); + socket.connect(connOptions); + }); + } + // eslint-disable-next-line no-promise-executor-return + privates.readyPromise + // eslint-disable-next-line consistent-return + .then(() => new Promise((sendResolve, sendReject) => { + if (!privates.socket) { + sendReject(new Error('No socket to write data to!')); + } else { + const socket = privates.socket; + // listen for error just in case + socket.once('error', (err) => { + socket.removeListener('sent', sendResolve); + sendReject(err); + }); + socket.once('sent', () => { + this.logger.info('Data successfully written to socket!', { reqId }); + socket.removeListener('error', sendReject); + sendResolve(); + }); + socket.write(data, encoding, () => socket.emit('sent')); + } + })) + .then(resolve, reject); + }), retry); + } + + /** + * Terminate connector permanently + * + * @returns {Promise} resolved once terminated + * @rejects {Error} when no 
socket to close + */ + terminate() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + this.logger.info('Terminating connector...'); + privates.terminated = true; + privates.terminatePromise = Promise.resolve() + .then(() => { + if (privates.socket) { + privates.socket.destroy(); + return privates.closePromise.then(() => { + this.logger.info('Terminated!'); + }); + } + this.logger.info('No socket to terminate!'); + return Promise.resolve(); + }); + } + privates.terminatePromise.then(resolve, reject); + }); + } +} + +/** + * TCP Connector Manager + * + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {PromiseRetryOptions} retryOptions - retry options + * @property {TCPConnectorOptions} socketOptions - socket options + */ +class TCPConnectorManager { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {TCPConnectorOptions} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, options) { + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + const retryOptions = Object.assign({}, options.retry || {}); + + Object.defineProperties(this, { + host: { + value: host + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + }, + socketOptions: { + get() { return Object.assign({}, options); } + } + }); + this.logger = (options.logger || this.host.logger); + + delete options.logger; + delete options.retry; + } + + /** + * Create new TCP Connector instance + * + * @param {integer} port - remote port + * @param {TCPConnectorOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {TCPConnector} instance + */ + create(port, options) { + return new TCPConnector(this.host, port, assignDefaults( + Object.assign({}, options || {}), + 
Object.assign(this.socketOptions, { + logger: this.logger, + retry: this.retryOptions + }) + )); + } + + /** + * Create new TCP Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {integer} port - remote port + * @param {TCPConnectorOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {TCPConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign TCPConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Ping TCP port on remote host + * + * @param {integer} port - remote port + * @param {TCPConnectorOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved when remote host is listened on the port + */ + ping(port, options) { + options = {}; + if (arguments.length > 1) { + options = getArgByType(arguments, 'object', { fromIndex: 1 }).value; + } + return this.create(port, options).ping(); + } + + /** + * Send data to remote host + * + * @param {integer} port - remote port + * @param {any} data - data to send + * @param {string} [encoding = 'utf8'] - data encoding + * @param {TCPConnectorOptions} [options] - options + * @param {PromiseRetryOptions} [options.retry] - retry options + * + * @returns {Promise} resolved once data sent + */ + send(port, data, encoding, options) { + encoding = 'utf8'; + options = {}; + + if (arguments.length > 2) { + encoding = getArgByType(arguments, 'string', { fromIndex: 2 }).value; + options = getArgByType(arguments, 'object', { fromIndex: 2 }).value; + } + + const conn = this.create(port, options); + let err; + return conn.send(data, encoding) + .catch((sendErr) => { + err = sendErr; + }) + .then(() => conn.terminate()) + 
.then(() => (err ? Promise.reject(err) : Promise.resolve())); + } +} + +module.exports = { + TCPConnector, + TCPConnectorManager +}; + +/** + * @typedef TCPConnectorOptions + * @type {Object} + * @property {boolean} [allowHalfOpen = false] - socket won't automatically send a FIN packet + * @property {string} [encoding] - encoding for the socket + * @property {string} [family = '4'] - version of IP stack + * @property {boolean} [noDelay = true] - disables the Nagle algorithm + * @property {integer} [timeout] - timeout after 'timeout' milliseconds of inactivity + * @property {boolean} [unref = false] - allow the program to exit if + * this is the only active socket in the event system + */ diff --git a/test/functional/shared/remoteHost/udpConnector.js b/test/functional/shared/remoteHost/udpConnector.js new file mode 100644 index 00000000..53c89033 --- /dev/null +++ b/test/functional/shared/remoteHost/udpConnector.js @@ -0,0 +1,336 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const dgram = require('dgram'); +const hasIn = require('lodash/hasIn'); + +const promiseUtil = require('../utils/promise'); + +/** + * @module test/functional/shared/remoteHost/udpConnector + * + * @typedef {import("../utils/logger").Logger} Logger + * @typedef {import("../utils/promise").PromiseRetryOptions} PromiseRetryOptions + * @typedef {import("./remoteHost").RemoteHost} RemoteHost + */ + +const DEFAULTS = Object.freeze({ + retry: Object.freeze({ + delay: 100, + maxTries: 10 + }) +}); + +const PRIVATES = new WeakMap(); + +let UDP_REQ_ID = 0; + +/** + * UDP Connector + * + * @property {Array} data - received data + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {integer} port - remote port + * @property {PromiseRetryOptions} retryOptions - retry options + * @property {boolean} terminated - 'true' if connector permanently terminated + */ +class UDPConnector { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {integer} port - port + * @param {Object} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, port, options) { + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + const retryOptions = Object.assign({}, options.retry || {}); + + Object.defineProperties(this, { + data: { + get() { return PRIVATES.get(this).data.slice(0); } + }, + host: { + value: host + }, + port: { + value: port + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + }, + terminated: { + get() { return PRIVATES.get(this).terminated; } + } + }); + PRIVATES.set(this, { + closeConnectorPromise: null, + closePromise: null, + data: [], + socket: null, + terminated: false, + terminatePromise: null + }); + this.logger = (options.logger || this.host.logger).getChild(`udp:${port}`); + } + + /** 
+ * Close current connection. + * + * Note: even if closed it still can be active if there are a lot of concurrent + * attempts to write data (new socket will be created) + * + * @returns {Promise} resolved once closed + * @rejects {Error} when no socket to close + */ + close() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + if (!this.closeConnectorPromise) { + this.logger.info('Closing connector...'); + const closeConnectorPromise = Promise.resolve() + .then(() => { + if (privates.socket) { + privates.socket.close(); + return privates.closePromise.then(() => { + if (privates.closeConnectorPromise === closeConnectorPromise) { + privates.closeConnectorPromise = null; + } + }); + } + this.logger.info('No socket to close!'); + return Promise.resolve(); + }); + privates.closeConnectorPromise = closeConnectorPromise; + } + privates.closeConnectorPromise.then(resolve, reject); + } else { + privates.terminatePromise.then(resolve, reject); + } + }); + } + + /** + * Erase data received from remote host + */ + eraseData() { + PRIVATES.get(this).data = []; + } + + /** + * Send data + * + * @param {any} data - data to send + * @param {PromiseRetryOptions} [retry] - retry options + * + * @returns {Promise} resolved once data was sent + */ + send(data, retryOpts) { + retryOpts = assignDefaults(Object.assign({}, retryOpts || {}), this.retryOptions); + + const reqId = UDP_REQ_ID; + UDP_REQ_ID += 1; + + this.logger.info('Sending data (+metadata info for debug):', { + data, + reqId, + retryOpts + }); + + const originCb = retryOpts.callback; + retryOpts.callback = (error) => { + if (this.terminated) { + return false; + } + this.logger.error('Error on attempt to send data... Going to retry if re-try attempts left', error, { + reqId, + tries: retryOpts.tries + }); + return originCb ? 
originCb(error) : true; + }; + + // eslint-disable-next-line consistent-return + return promiseUtil.retry(() => new Promise((resolve, reject) => { + if (this.terminated) { + this.logger.error('Attempt to write data using terminated UDP Connector!', { reqId }); + // eslint-disable-next-line no-promise-executor-return + return reject(new Error('UDPConnector was terminated')); + } + const privates = PRIVATES.get(this); + if (privates.socket === null) { + this.logger.info('Creating new socket', { reqId }); + + const socket = dgram.createSocket({ + reuseAddr: false, + type: 'udp4' + }); + socket.on('message', (msg) => privates.data.push(msg)); + socket.on('error', (error) => this.logger.error('Error caught', error)); + + this.eraseData(); + privates.socket = socket; + privates.closePromise = new Promise((closeResolve) => { + socket.once('close', () => { + this.logger.info('Connection closed!'); + if (privates.socket === socket) { + privates.closePromise = null; + privates.socket = null; + } + closeResolve(); + }); + }); + } + + privates.socket.send(data, 0, data.length, this.port, this.host.host, (err) => { + if (err) { + this.logger.error('Error caught on attempt to send data', { err, reqId }); + reject(err); + } else { + this.logger.info('Data successfully sent!', { reqId }); + resolve(); + } + }); + }), retryOpts); + } + + /** + * Terminate connector permanently + * + * @returns {Promise} resolved once terminated + * @rejects {Error} when no socket to close + */ + terminate() { + return new Promise((resolve, reject) => { + const privates = PRIVATES.get(this); + if (!this.terminated) { + this.logger.info('Terminating connector...'); + privates.terminated = true; + privates.terminatePromise = Promise.resolve() + .then(() => { + if (privates.socket) { + privates.socket.close(); + return privates.closePromise.then(() => { + this.logger.info('Terminated!'); + }); + } + this.logger.info('No socket to terminate!'); + return Promise.resolve(); + }); + } + 
privates.terminatePromise.then(resolve, reject); + }); + } +} + +/** + * UDP Connector Manager + * + * @property {RemoteHost} host - remote host + * @property {Logger} logger - logger + * @property {PromiseRetryOptions} retryOptions - retry options + */ +class UDPConnectorManager { + /** + * Constructor + * + * @param {RemoteHost} host - remote host + * @param {object} [options] - options + * @param {Logger} [options.logger] - logger + * @param {PromiseRetryOptions} [options.retry] - retry options + */ + constructor(host, options) { + options = assignDefaults( + Object.assign({}, options || {}), + DEFAULTS + ); + const retryOptions = Object.assign({}, options.retry || {}); + + Object.defineProperties(this, { + host: { + value: host + }, + retryOptions: { + get() { return Object.assign({}, retryOptions); } + } + }); + this.logger = (options.logger || this.host.logger); + } + + /** + * Create new UDP Connector instance + * + * @param {integer} port - remote port + * @param {PromiseRetryOptions} [options] - retry options + * + * @returns {UDPConnector} instance + */ + create(port, options) { + return new UDPConnector(this.host, port, { + logger: this.logger, + retry: assignDefaults( + Object.assign({}, options || {}), + this.retryOptions + ) + }); + } + + /** + * Create new UDP Connector instance and save as property + * + * @param {string} name - name to use to save instance as property + * @param {integer} port - remote port + * @param {PromiseRetryOptions} [options] - retry options + * + * @returns {UDPConnector} instance + */ + createAndSave(name) { + if (hasIn(this, name)) { + throw new Error(`Can't assign UDPConnector to '${name}' property - exists already!`); + } + Object.defineProperty(this, name, { + configurable: true, + value: this.create.apply(this, Array.from(arguments).slice(1)) + }); + return this[name]; + } + + /** + * Send data to remote host + * + * @param {integer} port - remote port + * @param {any} data - data to send + * @param 
{PromiseRetryOptions} [options] - retry options + * + * @returns {Promise} resolved once data sent + */ + send(port, data, options) { + const conn = this.create(port, options); + let err; + return conn.send(data) + .catch((sendErr) => { + err = sendErr; + }) + .then(() => conn.terminate()) + .then(() => (err ? Promise.reject(err) : Promise.resolve())); + } +} + +module.exports = { + UDPConnector, + UDPConnectorManager +}; diff --git a/test/functional/shared/testUtils/index.js b/test/functional/shared/testUtils/index.js new file mode 100644 index 00000000..2dd1789f --- /dev/null +++ b/test/functional/shared/testUtils/index.js @@ -0,0 +1,292 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); +const pathUtil = require('path'); + +const constants = require('../constants'); +const miscUtils = require('../utils/misc'); +const promiseUtils = require('../utils/promise'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +/** + * @module test/functional/shared/testUtils + * + * @typedef {import(".,/harness").BigIp} BigIp + */ + +const LISTENER_PROTOCOLS = constants.TELEMETRY.LISTENER.PROTOCOLS; + +/** + * Wrap value into array if not yet + * + * @private + * + * @param {any} val - value to wrap + * + * @returns {Array} wrapped value + */ +function toArray(val) { + return Array.isArray(val) ? 
val : [val]; +} + +/** + * Uninstall all TS packages + * + * @private + * + * @param {harness.BigIp} bigip - BigIp instance + * + * @returns Promise resolved once TS packages removed from F5 device + */ +function uninstallAllTSpackages(bigip) { + return promiseUtils.loopUntil((breakCb) => bigip.appLX.default.list() + .then((packages) => { + bigip.logger.info('List of installed packages', { packages }); + const tsPkg = packages.find((pkg) => pkg.packageName.includes('f5-telemetry')); + if (!tsPkg) { + return breakCb(); + } + bigip.logger.info('Uninstalling Telemetry Streaming package', { tsPkg }); + return bigip.appLX.default.uninstall(tsPkg.packageName); + })) + .then(() => bigip.telemetry.installed()) + .then((isOk) => { + if (!isOk) { + bigip.logger.info('Telemetry Streaming is not installed'); + return Promise.resolve(); + } + return Promise.reject(new Error('Unable to uninstall Telemetry Streaming')); + }); +} + +module.exports = { + /** + * Should configure TS using provided declaration (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + * @param {Object | function(BigIp)} declaration - declaration to send or callback + * tto call to get declaration for specific BIG-IP. 'null | undefined' to skip + */ + shouldConfigureTS(itFn, bigips, declaration) { + if (typeof itFn !== 'function') { + declaration = bigips; + bigips = itFn; + itFn = it; + } + + declaration = typeof declaration === 'function' + ? 
declaration + : () => miscUtils.deepCopy(declaration); + + toArray(bigips).forEach((bigip) => itFn( + `should configure TS - ${bigip.name}`, + function test() { + const decl = declaration(bigip); + if (decl === null || typeof decl === 'undefined') { + this.skip('No declaration to post'); + return Promise.resolve(); + } + return bigip.telemetry.declare(decl); + } + )); + }, + + /** + * Should install TS package (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + * @param {Object | function(BigIp)} pkg - pkg to install or callback + * tto call to get pkg for specific BIG-IP. 'null | undefined' to skip + */ + shouldInstallTSPackage(itFn, bigips, pkg) { + if (typeof itFn !== 'function') { + pkg = bigips; + bigips = itFn; + itFn = it; + } + + pkg = typeof pkg === 'function' ? pkg : () => miscUtils.deepCopy(pkg); + + toArray(bigips).forEach((bigip) => { + let installed = false; + itFn(`should install TS package - ${bigip.name}`, () => { + const pkgToInstall = pkg(bigip); + if (pkgToInstall === null || typeof pkgToInstall === 'undefined') { + this.skip('No TS package to install'); + return Promise.resolve(); + } + + return bigip.appLX.default.list() + .then((packages) => { + bigip.logger.info('List of installed packages', { packages }); + return bigip.appLX.default.install( + pathUtil.join(pkgToInstall.path, pkgToInstall.name), + installed + ); + }) + .then(() => promiseUtils.sleep(1000)) + .then(() => { + installed = true; + return bigip.telemetry.installed(); + }) + .then((isOk) => { + if (isOk) { + bigip.logger.info('Telemetry Streaming installed!'); + return Promise.resolve(); + } + return Promise.reject(new Error('Unable to install Telemetry Streaming')); + }); + }); + }); + }, + + /** + * Should remove pre-existing TS declaration (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + */ + 
shouldRemovePreExistingTSDeclaration(itFn, bigips) { + if (typeof itFn !== 'function') { + bigips = itFn; + itFn = it; + } + + toArray(bigips).forEach((bigip) => itFn( + `should remove pre-existing TS declaration - ${bigip.name}`, + () => bigip.telemetry.installed() + .then((isOk) => { + if (!isOk) { + bigip.logger.info('Telemetry Streaming is not installed'); + return Promise.resolve(); + } + bigip.logger.info('Telemetry Streaming is installed already! Need to cleanup config before uninstall'); + return bigip.telemetry.declare({ class: 'Telemetry' }) + .then((response) => bigip.logger.info('Existing declaration', { response })) + // should wait a bit to apply changes + .then(() => promiseUtils.sleep(1000)); + }) + )); + }, + + /** + * Should remove pre-existing TS package (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + */ + shouldRemovePreExistingTSPackage(itFn, bigips) { + if (typeof itFn !== 'function') { + bigips = itFn; + itFn = it; + } + + toArray(bigips).forEach((bigip) => itFn( + `should remove pre-existing TS package - ${bigip.name}`, + () => uninstallAllTSpackages(bigip) + .catch((error) => { + bigip.logger.error('Unable to verify package uninstall due following error', error); + return promiseUtils.sleepAndReject(5000, error); // sleep before retry + }) + )); + }, + + /** + * Send data to Event Listener (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + * @param {Any | function(BigIp)} message - message to send or callback + * tto call to get message for specific BIG-IP. 
'null | undefined' to skip + * @param {Object} [options] - options + * @param {Integer} [options.delay = 100] - delay before sending next message + * @param {Integer} [options.numberOfMsg = 10] - number of messages to send to BIG-IP (For each port and protocol) + * @param {Array | string} [options.protocol = ['udp', 'tcp']] - protocol + * @param {Array | integer} [options.port = 6514] - port + */ + shouldSendListenerEvents(itFn, bigips, message, options) { + if (typeof itFn !== 'function') { + options = message; + message = bigips; + bigips = itFn; + itFn = it; + } + + options = assignDefaults(options || {}, { + delay: 100, + numberOfMsg: 10, + port: constants.TELEMETRY.LISTENER.PORT.DEFAULT, + protocol: LISTENER_PROTOCOLS + }); + + message = typeof message === 'function' + ? message + : () => miscUtils.deepCopy(message); + + const protocol = Array.isArray(options.protocol) ? options.protocol : [options.protocol]; + const port = Array.isArray(options.port) ? options.port : [options.port]; + + protocol.forEach((proto) => port.forEach((p) => toArray(bigips).forEach((bigip) => itFn( + `should send events to TS Event Listener (to ${proto.toUpperCase()}:${p}) - ${bigip.name}`, + function test() { + let idx = 0; + return promiseUtils.loopUntil((breakCb) => { + if (idx >= options.numberOfMsg) { + return breakCb(); + } + idx += 1; + + const msg = message(bigip, proto, p, idx); + if (msg === null || typeof msg === 'undefined') { + this.skip('No message to send'); + return breakCb(); + } + return bigip[proto.toLowerCase()].send(p, msg) + .then(() => { + if (idx < options.numberOfMsg && options.delay) { + bigip.logger.info(`Sleep for ${options.delay}ms. 
before sending next message (${idx} out of ${options.numberOfMsg})`); + return promiseUtils.sleep(options.delay); + } + return Promise.resolve(); + }); + }); + } + )))); + }, + + /** + * Should verify TS package installation (unit test) + * + * @param {function} itFn - mocha 'it'-like function + * @param {Array | BigIp} bigips - BIG-IP(s) to configure + */ + shouldVerifyTSPackageInstallation(itFn, bigips) { + if (typeof itFn !== 'function') { + bigips = itFn; + itFn = it; + } + + toArray(bigips).forEach((bigip) => itFn( + `should verify TS package installation - ${bigip.name}`, + () => bigip.telemetry.version() + .then((verInfo) => { + bigip.logger.info('Telemetry Streaming version info', { verInfo }); + assert.notStrictEqual(verInfo.version, undefined, 'should have "version" property'); + }) + )); + } +}; diff --git a/test/functional/shared/util.js b/test/functional/shared/util.js deleted file mode 100644 index 7c953d4c..00000000 --- a/test/functional/shared/util.js +++ /dev/null @@ -1,538 +0,0 @@ -/* - * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for - * license terms. Notwithstanding anything to the contrary in the EULA, Licensee - * may copy and modify this software product for its internal business purposes. - * Further, Licensee may upload, publish and distribute the modified version of - * the software product on devcentral.f5.com. 
- */ - -'use strict'; - -const Ajv = require('ajv'); -const fs = require('fs'); -const icrdk = require('icrdk'); // eslint-disable-line import/no-extraneous-dependencies -const net = require('net'); -const request = require('request'); -const SSHClient = require('ssh2').Client; // eslint-disable-line import/no-extraneous-dependencies - -const constants = require('./constants'); -const logger = require('../../winstonLogger').logger; - -/** - * Allows calling makeRequest with retryOptions - * - * @param {Function} makeRequest - function call to makeRequest - * @param {Number} interval - time in ms between retries - * @param {Number} maxRetries - maximum number of retry attempts - * @returns {Promise} - */ - -const makeRequestWithRetry = function (makeRequest, interval, maxRetries) { - return new Promise((resolve, reject) => { - makeRequest() - .then(resolve) - .catch((error) => { - setTimeout(() => { - if (maxRetries <= 1) { - error.message = `Maximum retries reached. Last error: ${error.message}`; - reject(error); - return; - } - makeRequestWithRetry(makeRequest, interval, maxRetries - 1) - .then(resolve, reject); - }, interval); - }); - }); -}; - -module.exports = { - - makeRequestWithRetry, - logger, - - /** Create folder (sync method) - * - * @param {String} fpath - path to folder - */ - createDir(fpath) { - if (!fs.existsSync(fpath)) { - try { - fs.mkdirSync(fpath); - } catch (err) { - if (err.code !== 'EEXIST') { - throw err; - } - } - } - }, - - /** - * Stringify a message - * - * @param {Object|String} msg - message to stringify - * - * @returns {Object} Stringified message - */ - stringify(msg) { - if (typeof msg === 'object') { - try { - msg = JSON.stringify(msg); - } catch (e) { - // just leave original message intact - } - } - return msg; - }, - - /** - * Deep copy - * - * @param {Object} obj - data to deep copy - * - * @returns {Object} Copied object - */ - deepCopy(obj) { - return JSON.parse(JSON.stringify(obj)); - }, - - /** - * Get package details - * 
- * @returns {Object} { name: 'foo.rpm', path: '/tmp/foo.rpm' } - */ - getPackageDetails() { - // default to new build directory if it exists, otherwise use dist directory - const dir = `${__dirname}/../../../dist`; - - const distFiles = fs.readdirSync(dir); - const packageFiles = distFiles.filter((f) => f.endsWith('.rpm')); - - // get latest rpm file (by timestamp since epoch) - // note: this might not work if the artifact resets the timestamps - const latest = { file: null, time: 0 }; - packageFiles.forEach((f) => { - const fStats = fs.lstatSync(`${dir}/${f}`); - if (fStats.birthtimeMs >= latest.time) { - latest.file = f; - latest.time = fStats.birthtimeMs; - } - }); - const packageFile = latest.file; - if (!packageFile) { - throw new Error(`Unable to find RPM in ${dir}`); - } - - return { name: packageFile, path: dir }; - }, - - /** - * Perform HTTP request - * - * @param {String} host - HTTP host - * @param {String} uri - HTTP uri - * @param {Object} options - function options - * @param {Integer} [options.port] - HTTP port, default is 443 - * @param {String} [options.protocol] - HTTP protocol, default is https - * @param {String} [options.method] - HTTP method, default is GET - * @param {String} [options.body] - HTTP body - * @param {Object} [options.headers] - HTTP headers - * @param {Boolean} [otions.rawResponse] - Whether or not to return raw HTTP response. Default=false - * - * @returns {Object} Returns promise resolved with response - */ - makeRequest(host, uri, options) { - options = options || {}; - const port = options.port === undefined ? constants.REQUEST.PORT : options.port; - const protocol = options.protocol === undefined ? constants.REQUEST.PROTOCOL : options.protocol; - - host = host.endsWith('/') ? host.slice(0, host.length - 1) : host; - uri = uri || ''; - uri = uri.startsWith('/') ? 
uri : `/${uri}`; - - const fullUri = `${protocol}://${host}:${port}${uri}`; - const requestOptions = { - uri: fullUri, - method: options.method || 'GET', - body: options.body ? this.stringify(options.body) : undefined, - headers: options.headers || {}, - strictSSL: false - }; - - return new Promise((resolve, reject) => { - request(requestOptions, (err, res, body) => { - if (err) { - reject(new Error(`HTTP error for '${fullUri}' : ${err}`)); - } else if (res.statusCode >= 200 && res.statusCode <= 299) { - if (options.rawResponse) { - resolve(res); - } else { - try { - resolve(JSON.parse(body)); - } catch (e) { - resolve(body); - } - } - } else { - const msg = `Bad status code: ${res.statusCode} ${res.statusMessage} ${res.body} for '${fullUri}'`; - err = new Error(msg); - err.statusCode = res.statusCode; - err.statusMessage = res.statusMessage; - reject(err); - } - }); - }); - }, - - /** - * Get auth token - * - * @param {String} host - host - * @param {String} username - username - * @param {String} password - password - * @param {String} port - port - * @returns {Promise} Returns promise resolved with auth token: { token: 'token' } - */ - getAuthToken(host, username, password, port) { - const uri = '/mgmt/shared/authn/login'; - const body = JSON.stringify({ - username, - password, - loginProviderName: 'tmos' - }); - const postOptions = { - method: 'POST', - port, - body - }; - - return this.makeRequest(host, uri, postOptions) - .then((data) => ({ token: data.token.token })) - .catch((err) => { - const msg = `getAuthToken: ${err}`; - throw new Error(msg); - }); - }, - - /** - * Install ILX package - * - * @param {String} host - host - * @param {String} authToken - auth token - * @param {String} file - local file (RPM) to install - * - * @returns {Promise} Returns promise resolved upon completion - */ - installPackage(host, authToken, file, port) { - const opts = { - HOST: host, - AUTH_TOKEN: authToken, - PORT: port - }; - - return new Promise((resolve, reject) => { 
- icrdk.deployToBigIp(opts, file, (err) => { - if (err) { - // resolve if error is because the package is already installed - // in that case error is of type 'string' - instead of in .message - if (process.env[constants.ENV_VARS.TEST_CONTROLS.REUSE_INSTALLED_PACKAGE] !== undefined - && /already installed/.test(err)) { - resolve(); - } else { - reject(err); - } - } else { - resolve(); - } - }); - }); - }, - - /** - * Get list of installed ILX packages - * @param {String} host - host - * @param {String} authToken - auth token - * - * @returns {Promise} Returns promise resolved upon completion - */ - getInstalledPackages(host, authToken) { - // icrdk bug - should pass headers and should send additional requests - const opts = { - HOST: host, - headers: { - 'x-f5-auth-token': authToken - } - }; - const self = this; - - return new Promise((resolve, reject) => { - function checkDataAndRetry(data) { - if (data.queryResponse) { - resolve(data.queryResponse); - } else if (data.selfLink) { - const uri = data.selfLink.replace('https://localhost', ''); - setTimeout(() => { - self.makeRequest(host, uri, { headers: opts.headers }) - .then(checkDataAndRetry); - }, 300); - } else { - reject(new Error(`Unable to fetch data. 
Unexpected response: ${JSON.stringify(data)}`)); - } - } - - icrdk.queryInstalledPackages(opts, (err, queryResults) => { - if (err) { - reject(err); - } else { - checkDataAndRetry(queryResults); - } - }); - }); - }, - - /** - * Uninstall ILX package - * - * @param {String} host - host - * @param {String} authToken - auth token - * @param {String} pkg - package to remove from device - * - * @returns {Promise} Returns promise resolved upon completion - */ - uninstallPackage(host, authToken, pkg) { - const opts = { - HOST: host, - AUTH_TOKEN: authToken - }; - - return new Promise((resolve, reject) => { - icrdk.uninstallPackage(opts, pkg, (err) => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); - }, - - /** - * Perform remote command (over ssh) - * - * @param {String} host - host - * @param {String} username - username - * @param {String} command - command to run - * @param {Object} options - function options - * @param {Integer} [options.port] - port - * @param {Integer} [options.password] - password (use this or privateKey) - * @param {Integer} [options.privateKey] - path to private key - * - * @returns {Promise} Returns promise resolved with response - */ - performRemoteCmd(host, username, command, options) { - options = options || {}; - - const conn = new SSHClient(); - return new Promise((resolve, reject) => { - let response = ''; - conn.on('ready', () => { - conn.exec(command, (err, stream) => { - if (err) throw err; - - stream.on('data', (data) => { - response += data.toString('utf8'); - }); - stream.on('close', () => { - conn.end(); - }); - stream.stderr.on('data', (data) => { - reject(new Error(data.toString('utf8'))); - }); - }); - }) - .connect({ - host, - port: options.port || 22, - username, - password: options.password || null, - privateKey: options.privateKey ? 
fs.readFileSync(options.privateKey) : null - }); - - conn.on('end', () => { - resolve(response); - }); - }); - }, - - /** - * Get host(s) - info provided in one of two ways - * - *Harness File* - file: look for example test/deployment/example_harness_facts.json - * - *Environment Vars* - constants contains var for IP (1+), USER, PWD - * - * @param {String} harnessType - type of harness to query for: BIGIP|CONSUMER - * - * @returns {Object} Returns [ { ip: x.x.x.x, username: admin, password: admin } ] - */ - getHosts(harnessType) { - let hosts; - let envVars; - - if (harnessType === 'BIGIP') { - envVars = constants.ENV_VARS.TEST_HARNESS; - } else if (harnessType === 'CONSUMER') { - envVars = constants.ENV_VARS.CONSUMER_HARNESS; - } - - const testHarnessFile = envVars.FILE ? process.env[envVars.FILE] : null; - if (testHarnessFile && fs.existsSync(testHarnessFile)) { - let filter; - if (harnessType === 'BIGIP') { - filter = (item) => item.is_f5_device && item.type === 'bigip'; - } else { - filter = (item) => !item.is_f5_device; - } - // eslint-disable-next-line import/no-dynamic-require, global-require - hosts = require(testHarnessFile).filter(filter).map((item) => { - if (item.is_f5_device) { - item = { - ip: item.admin_ip, - username: item.f5_rest_user.username, - password: item.f5_rest_user.password, - hostname: item.f5_hostname, - hostalias: item.f5_hostname.substring(item.f5_hostname.indexOf('bigip'), item.f5_hostname.indexOf('.')) - }; - } else { - item = { - ip: item.admin_ip, - username: item.ssh_user.username, - password: item.ssh_user.password - }; - } - return item; - }); - } else if (envVars && envVars.IP && process.env[envVars.IP]) { - // straight up environment variables - could be 1+ hosts: x.x.x.x,x.x.x.y - hosts = process.env[envVars.IP].split(',').map((host) => ({ - ip: host, - username: process.env[envVars.USER], - password: process.env[envVars.PWD] - })); - // end environment variables - } else { - const msg = 'Error: Please provide appropriate 
test harness environment variables'; - logger.error(msg); - throw new Error(msg); - } - return hosts; - }, - - /** - * Send event - send msg using tcp - * - * @param {String} host - host where event should be sent - * @param {String} msg - msg to send - * - * @returns {Promise} Returns promise resolved on sent message - */ - sendEvent(host, msg) { - const port = constants.EVENT_LISTENER_DEFAULT_PORT; - - return new Promise((resolve, reject) => { - const client = net.createConnection({ host, port }, () => { - logger.info(`Sending following message to ${host} [port=${port}]`, { msg }); - client.write(msg); - client.end(); - }); - client.on('end', () => { - resolve(); - }); - client.on('error', (err) => { - reject(err); - }); - }); - }, - - /** - * Validate data against JSON schema - * - * @param {String} data - data to validate - * @param {String} schema - JSON schema to use during validation - * - * @returns {Boolean|Object} Returns true on successful validation or object with errors - */ - validateAgainstSchema(data, schema) { - const ajv = new Ajv({ useDefaults: true }); - const validator = ajv.compile(schema); - const valid = validator(data); - if (!valid) { - return { errors: validator.errors }; - } - return true; - }, - - /** - * Performs a POST declaration request to a device - * - * @param {Object} deviceInfo - * @param {Object} declaration - * @returns {Promise} Promise resolved with response - */ - postDeclaration(deviceInfo, declaration) { - const uri = `${constants.BASE_ILX_URI}/declare`; - const host = deviceInfo.ip; - const user = deviceInfo.username; - const password = deviceInfo.password; - const port = deviceInfo.port; - - return this.getAuthToken(host, user, password, port) - .then((data) => { - const postOptions = { - port, - method: 'POST', - headers: { - 'x-f5-auth-token': data.token - }, - body: declaration - }; - return this.makeRequest(host, uri, postOptions); - }); - }, - - /** - * Sleep for N milliseconds - * - * @param {Integer} sleepTime - 
number of ms. - * - * @returns {Promise} - */ - sleep(sleepTime) { - return new Promise((resolve) => { setTimeout(resolve, sleepTime); }); - }, - - /** - * Gets the full version of the Device Under Test (dut) - * - * @param {Object} dut - Device Under Test object - * @param {String} dut.ip - DUT IP address - * @param {String} dut.username - DUT username - * @param {String} dut.password - DUT password - * - * @returns {Promise} Promise resolved with full version of the DUT (ex: '14.1.4.2') - */ - getBigipVersion(dut) { - const uri = '/mgmt/tm/sys/clock'; - const host = dut.ip; - const user = dut.username; - const password = dut.password; - return this.getAuthToken(host, user, password) - .then((data) => { - const postOptions = { - method: 'GET', - headers: { - 'x-f5-auth-token': data.token - } - }; - return this.makeRequest(host, uri, postOptions); - }) - .then((response) => response.selfLink.split('ver=')[1]); - } -}; diff --git a/test/functional/shared/utils/logger.js b/test/functional/shared/utils/logger.js new file mode 100644 index 00000000..85492fda --- /dev/null +++ b/test/functional/shared/utils/logger.js @@ -0,0 +1,59 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const logger = require('../../../winstonLogger').logger; + +/** + * @module test/functional/shared/utils/logger + */ + +/** + * Create new child logger + * + * @param {string} parentPrefix - parent's prefix + * @param {string} childPrefix - child's prefix + * + * @returns {Logger} instance + */ +function getChild(parentPrefix, childPrefix) { + // eslint-disable-next-line no-use-before-define + return new Logger(`${parentPrefix}.${childPrefix}`); +} + +/** + * Logger class + */ +class Logger { + /** + * Constructor + * + * @param {string} prefix - message prefix, will be printed inside '[]' in the beginning of message + */ + constructor(prefix) { + [ + 'alert', + 'crit', + 'debug', + 'emerg', + 'error', + 'info', + 'notice', + 'warning' + ].forEach((logLevel) => Object.defineProperty(this, logLevel, { + value: logger.log.bind(logger, logLevel, `[${prefix}]`) + })); + + Object.defineProperty(this, 'getChild', { + value: getChild.bind(this, prefix) + }); + } +} + +module.exports = new Logger('main'); diff --git a/test/functional/shared/utils/misc.js b/test/functional/shared/utils/misc.js new file mode 100644 index 00000000..930be417 --- /dev/null +++ b/test/functional/shared/utils/misc.js @@ -0,0 +1,221 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const Ajv = require('ajv'); +const cloneDeep = require('lodash/cloneDeep'); +const defaultTo = require('lodash/defaultTo'); +const fs = require('fs'); +const hasKey = require('lodash/has'); + +/** + * @module test/functional/shared/utils/misc + */ + +const ENV_TRUTHY_VALS = ['true', 'yes']; + +// eslint-disable-next-line no-multi-assign +const miscUtils = module.exports = { + /** + * Cast value from process.env to specific type + * + * @param {string} type - type to cast value to + * @param {string} val - value from process.env + * + * @returns {any} value + */ + castEnvVarTo(type, val) { + if (type === 'boolean') { + val = (val || '').toLowerCase().trim(); + if (ENV_TRUTHY_VALS.indexOf(val) !== -1) { + return true; + } + val = parseInt(val, 10); + return !Number.isNaN(val) && val > 0; + } + throw new Error(`Unsupported type '${type}'`); + }, + + /** + * Create folder (sync method) + * + * @param {string} fpath - path to folder + */ + createDir(fpath) { + if (!fs.existsSync(fpath)) { + try { + fs.mkdirSync(fpath); + } catch (err) { + if (err.code !== 'EEXIST') { + throw err; + } + } + } + }, + + deepCopy: cloneDeep, + + /** + * Get first element + * @param {Object} args - 'arguments' + * @param {string} typeName - type name + * @param {Object} [options] - options + * @param {any} [options.defaultValue] - default value + * @param {number} [options.fromIndex] - the index to start the search at + * + * @returns {GetArgByTypeRet} + */ + getArgByType(args, typeName, options) { + options = defaultTo(options, {}); + const ret = { + found: false + }; + const fromIndex = defaultTo(options.fromIndex, -1); + Array.prototype.find.call(args, (elem, idx) => { + // eslint-disable-next-line valid-typeof + if (idx >= fromIndex && typeof elem === typeName) { + ret.found = true; + ret.position = idx; + ret.value = elem; + return true; + } + return false; + }); + if (!ret.found && hasKey(options, 'defaultValue')) { + ret.value = options.defaultValue; + } + 
return ret; + }, + + /** + * Get arg from process.env + * + * @param {string} name - env arg name + * @param {Object} [options] - options + * @param {string} [options.castTo] - cast to type + * @param {any} [options.defaultValue = undefined] - default value + * + * @returns {string | undefined} value + * @throws {Error} when no 'defaultValue' provided and process.env doesn't have such key + */ + getEnvArg(name, options) { + options = defaultTo(options, {}); + if (hasKey(process.env, name)) { + const val = process.env[name]; + return options.castTo + ? miscUtils.castEnvVarTo(options.castTo, val) + : val; + } + if (hasKey(options, 'defaultValue')) { + return options.defaultValue; + } + throw new Error(`process.env has no such property "${name}"`); + }, + + /** + * Get package details + * + * @returns {{name: string, path: string}} { name: 'foo.rpm', path: '/tmp/foo.rpm' } + */ + getPackageDetails() { + // default to new build directory if it exists, otherwise use dist directory + const dir = `${__dirname}/../../../../dist`; + + const distFiles = fs.readdirSync(dir); + const packageFiles = distFiles.filter((f) => f.endsWith('.rpm')); + + // get latest rpm file (by timestamp since epoch) + // note: this might not work if the artifact resets the timestamps + const latest = { file: null, time: 0 }; + packageFiles.forEach((f) => { + const fStats = fs.lstatSync(`${dir}/${f}`); + if (fStats.birthtimeMs >= latest.time) { + latest.file = f; + latest.time = fStats.birthtimeMs; + } + }); + const packageFile = latest.file; + if (!packageFile) { + throw new Error(`Unable to find RPM in ${dir}`); + } + + return { + name: packageFile, + path: dir + }; + }, + + /** + * Generate random string + * + * @param {integer} [length=6] - length + * + * @returns {string} random string + */ + randomString(length) { + length = arguments.length > 0 ? 
length : 6; + return Math.random().toString(20).slice(2, 2 + length); + }, + + /** + * Read file and try to parse its data as JSON + * + * @param {string} path - path to a file + * @param {boolean} [async = false] - sync or async + * + * @returns {any | Promise} parsed data + */ + readJsonFile(path, async) { + async = miscUtils.getArgByType(arguments, 'boolean', { + defaultValue: false, + fromIndex: 1 + }).value; + if (!async) { + const data = fs.readFileSync(path); + try { + return JSON.parse(data); + } catch (parseErr) { + throw new Error(`Unable to parse JSON data from file "${path}": ${parseErr}`); + } + } + return new Promise((resolve, reject) => { + try { + resolve(miscUtils.readJsonFile(path)); + } catch (readErr) { + reject(readErr); + } + }); + }, + + /** + * Validate data against JSON schema + * + * @param {string} data - data to validate + * @param {string} schema - JSON schema to use during validation + * + * @returns {boolean | object} true on successful validation or object with errors + */ + validateAgainstSchema(data, schema) { + const ajv = new Ajv({ useDefaults: true }); + const validator = ajv.compile(schema); + const valid = validator(data); + if (!valid) { + return { errors: validator.errors }; + } + return true; + } +}; + +/** + * @typedef GetArgByTypeRet + * @type {Object} + * @property {boolean} found - true if element found + * @property {number} [position] - element's position in 'arguments' + * @property {any} [value] - value + */ diff --git a/test/functional/shared/utils/promise.js b/test/functional/shared/utils/promise.js new file mode 100644 index 00000000..6d1759ae --- /dev/null +++ b/test/functional/shared/utils/promise.js @@ -0,0 +1,306 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. 
+ * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. + */ + +'use strict'; + +/** + * @module test/functional/shared/utils/promise + */ + +// eslint-disable-next-line no-multi-assign +const promiseUtils = module.exports = { + /** + * Returns a promise that resolves after all of the given promises have either fulfilled or rejected, + * with an array of objects that each describes the outcome of each promise. + * + * Note: original method is available on node 12.9.0+ + * + * This function is useful when you run promises that doesn't depend on each other and + * you don't want them to be in unknown state like Promise.all do when one of the + * promises was rejected. Ideally this function should be used everywhere instead of Promise.all + * + * @param {Array} + * + * @returns {Promise>} resolved once all of the + * given promises have either fulfilled or rejected + */ + allSettled(promises) { + return new Promise((resolve, reject) => { + if (!Array.isArray(promises)) { + reject(new Error(`${promises} is not an array`)); + } else { + Promise.all(promises.map((p) => Promise.resolve(p) + .then( + (val) => ({ status: 'fulfilled', value: val }), + (err) => ({ status: 'rejected', reason: err }) + ))) + .then(resolve, reject); + } + }); + }, + + /** + * Get values returned by 'allSettled' + * + * Note: when 'ignoreRejected' is true then 'undefined' will be returned for rejected promises + * to preserve order as in an original array + * + * @param {Array} statuses - array of statuses + * @param {Boolean} [ignoreRejected = false] - ignore rejected promises + * + * @returns {Array} filtered results + * @throws {Error} original rejection error + */ + getValues(statuses, ignoreRejected) { + return statuses.map((status) => { + if (!ignoreRejected && typeof status.reason !== 'undefined') { + throw status.reason; + } + return status.value; + }); + }, + + /** + * Async 'forEach' + * + * @param {Array} 
collection - collection of elements + * @param {ForLoopCb} callbackFn - function to execute on each element + * + * @return {Promise} resolved once finished + */ + loopForEach(collection, callbackFn) { + return new Promise((resolve, reject) => { + if (!Array.isArray(collection)) { + reject(new Error(`${collection} is not an array`)); + } else if (typeof callbackFn !== 'function') { + reject(new Error(`${callbackFn} is not a function`)); + } else if (collection.length === 0) { + resolve(); + } else { + // caching it to ignore possible mutations + const collectionLength = collection.length; + let idx = 0; + + promiseUtils.loopUntil((breakCb) => { + if (idx >= collectionLength) { + return breakCb(); + } + return Promise.resolve() + .then(() => callbackFn(collection[idx], idx, collection, breakCb)) + .then(() => { idx += 1; }); + }) + .then(resolve, reject); + } + }); + }, + + /** + * Async 'until' loop + * + * Note: + * - 'until' because `callbackFn` will be called at least once due implementation + * + * @param {LoopUntilCb} callbackFn - function to execute + * + * @return {Promise} resolved when stopped and with value returned by + * last succeed execution of 'callbackFn' + */ + loopUntil(callbackFn) { + return new Promise((resolve, reject) => { + if (typeof callbackFn !== 'function') { + reject(new Error(`${callbackFn} is not a function`)); + } else { + let lastRet; + let stopRequested = false; + const stopLoop = () => { + stopRequested = true; + }; + Object.defineProperty(stopLoop, 'called', { + get: () => stopRequested + }); + + (function next() { + if (stopLoop.called) { + resolve(lastRet); + } else { + Promise.resolve() + .then(() => callbackFn(stopLoop)) + .then((ret) => { + lastRet = ret; + return next(); + }) + .catch(reject); + } + }()); + } + }); + }, + + /** + * Function that will attempt the promise over and over again + * + * Note: + * - if no 'opts' passed to the function then 'fn' will be executed only once + * - if 'opts.maxTries' set to 1 then 'fn' 
will be executed only once + * - if 'opts.maxTries' set to 2+ then 'fn' will be executed 2+ times + * - if you want to know how many attempts were made then you can pass '{}' as 'opts' and + * then check 'tries' property + * - if 'opts.callback' specified then it will be executed 'opts.maxTries - 1' times + * + * @param {function} fn - function to call + * @param {PromiseRetryOptions} [opts] - options object + * + * @returns {Promise} resolved with value returned by 'fn' when succeed + */ + retry(fn, opts) { + return new Promise((resolve, reject) => { + if (typeof fn !== 'function') { + reject(new Error(`${fn} is not a function`)); + } else { + opts = opts || {}; + opts.tries = 0; + opts.maxTries = Math.abs(opts.maxTries) || 1; + + promiseUtils.loopUntil((breakCb) => Promise.resolve() + .then(() => { + opts.tries += 1; + return fn(); + }) + .then((ret) => { + breakCb(); + return ret; + }) + .catch((error) => { + if (opts.tries < opts.maxTries && (!opts.callback || opts.callback(error))) { + let delay = opts.delay || 0; + + // applying backoff after the second try only + if (opts.backoff && opts.tries > 1) { + /* eslint-disable no-restricted-properties */ + delay += opts.backoff * Math.pow(2, opts.tries - 1); + } + if (delay) { + return promiseUtils.sleep(delay); + } + return Promise.resolve(); + } + return Promise.reject(error); + })) + .then(resolve, reject); + } + }); + }, + + /** + * Sleep for N ms. + * + * @param {integer} sleepTime - number of ms. 
+ * + * @returns {Promise} resolved once N .ms passed or rejected if canceled via .cancel() + */ + sleep(sleepTime) { + /** + * According to http://www.ecma-international.org/ecma-262/6.0/#sec-promise-executor + * executor will be called immediately (synchronously) on attempt to create Promise + */ + let cancelCb; + const promise = new Promise((resolve, reject) => { + const timeoutID = setTimeout(() => { + cancelCb = null; + resolve(); + }, sleepTime); + cancelCb = (reason) => { + cancelCb = null; + clearTimeout(timeoutID); + reject(reason || new Error('canceled')); + }; + }); + /** + * @param {Error} [reason] - cancellation reason + * + * @returns {Boolean} 'true' if cancelCb called else 'false' + */ + promise.cancel = (reason) => { + if (cancelCb) { + cancelCb(reason); + return true; + } + return false; + }; + return promise; + }, + + /** + * Sleep for N ms. and reject after it + * + * @param {integer} sleepTime - number of ms. + * @param {Error | string} [error] - Error or message to use as rejection reason + * + * @returns {Promise} resolved after timeout + */ + sleepAndReject(sleepTime, error) { + return promiseUtils.sleep(sleepTime) + .then(() => { + if (arguments.length < 2) { + error = new Error('sleepAndReject error!'); + } else if (typeof error === 'string') { + error = new Error(error); + } + return Promise.reject(error); + }); + } +}; + +/** + * "break" callback + * + * @callback BreakCb + * @property {boolean} called - returns true if loop stop was requested + * + * @returns {void} + */ +/** + * "forLoop" callback + * + * @callback ForLoopCb + * @param {any} item - the current element being processed in the array + * @param {number} [index] - the index of element in the array + * @param {Array} [array] - the array forLoop() was called upon + * @param {BreakCb} [breakCb] - callback to stop a loop + * + * @returns {void|Promise} + */ +/** + * Promise status + * + * @typedef PromiseResolutionStatus + * @type {object} + * @property {string} status - 
fulfilled or rejected + * @property {any} value - value returned by fulfilled promise + * @property {Error} reason - rejection reason (error object) + */ +/** Promise re-try options + * @typedef PromiseRetryOptions + * @type {object} + * @param {number} [backoff] - a backoff factor to apply between attempts after the second try + * (most errors are resolved immediately by a second try without a delay). By default 0. + * @param {function} [callback] - callback(err) to execute when function failed. + * Should return 'true' to continue 'retry' process. By default 'null'. + * @param {number} [delay] - a delay to apply between attempts. By default 0. + * @param {number} [maxDelay] - max delay + * @param {number} [maxTries] - max number of re-try attempts. By default '1'. + * @param {number} [minDelay] - min delay + */ +/** + * "loopUntil" callback + * + * @callback LoopUntilCb + * @param {BreakCb} breakCb - callback to stop a loop + * + * @returns {void|Promise} + */ diff --git a/test/functional/shared/utils/request.js b/test/functional/shared/utils/request.js new file mode 100644 index 00000000..5568f332 --- /dev/null +++ b/test/functional/shared/utils/request.js @@ -0,0 +1,251 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +const assignDefaults = require('lodash/defaults'); +const clone = require('lodash/clone'); +const request = require('request'); +const trimEnd = require('lodash/trimEnd'); +const trimStart = require('lodash/trimStart'); + +const constants = require('../constants'); +const getArgByType = require('./misc').getArgByType; +const logger = require('./logger').getChild('request'); + +/** + * @module test/functional/shared/utils/request + */ + +/* + * Helper functions for HTTP(s) requests + */ +const MAKE_REQUEST_OPTS_TO_REMOVE = [ + 'allowSelfSignedCert', + 'continueOnErrorCode', + 'expectedResponseCode', + 'fullURI', + 'host', + 'includeResponseObject', + 'json', + 'passphrase', + 'port', + 'protocol', + 'rawResponseBody', + 'username' +]; + +let REQUEST_ID = 0; + +/** + * Build URL + * + * @param {URL | string} [urlOpts] - URL config + * @param {boolean} [isProxy = false] - true if should build URL for proxy + * + * @returns {string} URL as a string + */ +function buildURL(urlOpts, isProxy) { + let url = ''; + if (typeof urlOpts === 'string') { + if (urlOpts) { + // might be a good idea to check for protocol and etc. 
+ url = urlOpts; + } + } else if (typeof urlOpts === 'object' && !Array.isArray(urlOpts)) { + if (urlOpts.host) { + let auth = ''; + if (urlOpts.username) { + auth = urlOpts.username; + if (urlOpts.passphrase) { + auth = `${auth}:${urlOpts.passphrase}`; + } + auth = `${auth}@`; + } + const protocol = urlOpts.protocol || constants.HTTP_REQUEST.PROTOCOL; + const port = `:${urlOpts.port || constants.HTTP_REQUEST.PORT}`; + url = `${protocol}://${auth}${trimEnd(urlOpts.host, '/')}${port}`; + + if (!isProxy && urlOpts.uri) { + url = `${url}/${trimStart(urlOpts.uri, '/')}`; + } + } + } + return url; +} + +/** + * Perform HTTP request + * + * @public + * + * @example + * // host only + * makeRequest(hostStr) + * @example + * // options only + * makeRequest(optionsObj) + * @example + * // host and options + * makeRequest(hostStr, optionsObj) + * @example + * // host and uri and options + * makeRequest(hostStr, uriStr, optionsObj) + * @example + * // host and uri + * makeRequest(hostStr, uriStr) + * + * @param {string} [host] - HTTP host + * @param {string} [uri] - HTTP uri + * @param {RequestOptions} [options] - request options + * + * @returns {Promise} resolved with response + */ +module.exports = function makeRequest() { + if (arguments.length === 0) { + throw new Error('makeRequest: no arguments were passed to function'); + } + + // rest params syntax supported by node 6+ only + const host = getArgByType(arguments, 'string', { fromIndex: 0 }).value; + const uri = getArgByType(arguments, 'string', { fromIndex: 1 }).value; + let options = getArgByType(arguments, 'object', { defaultValue: {} }).value; + + options = Object.assign({}, options); + options = assignDefaults(options, { + continueOnErrorCode: false, + expectedResponseCode: [200], + includeResponseObject: false, + json: true, + logger, + method: 'GET', + port: constants.HTTP_REQUEST.PORT, + protocol: constants.HTTP_REQUEST.PROTOCOL, + rawResponseBody: false + }); + // copy complex objects that may mutate over time 
during request processing + options.headers = clone(options.headers); + options.expectedResponseCode = clone(options.expectedResponseCode); + + if (host) { + options.host = host; + } + if (uri) { + options.uri = uri; + } + options.strictSSL = typeof options.allowSelfSignedCert === 'undefined' + ? constants.HTTP_REQUEST.STRICT_SSL : !options.allowSelfSignedCert; + + if (options.gzip && !options.headers['Accept-Encoding']) { + options.headers['Accept-Encoding'] = 'gzip'; + } + + if (options.rawResponseBody) { + options.encoding = null; + } + + if (options.json && typeof options.body !== 'undefined') { + options.body = JSON.stringify(options.body); + } + + options.uri = options.host ? buildURL(options) : options.fullURI; + if (!options.uri) { + throw new Error('makeRequest: no fullURI or host provided'); + } + if (typeof options.proxy !== 'undefined') { + options.proxy = buildURL(options.proxy, true); + if (!options.proxy) { + delete options.proxy; + } + } + + const continueOnErrorCode = options.continueOnErrorCode; + const expectedResponseCode = Array.isArray(options.expectedResponseCode) + ? options.expectedResponseCode : [options.expectedResponseCode]; + const includeResponseObject = options.includeResponseObject; + const rawResponseBody = options.rawResponseBody; + + MAKE_REQUEST_OPTS_TO_REMOVE.forEach((key) => { + delete options[key]; + }); + + const reqId = REQUEST_ID; + REQUEST_ID += 1; + + return new Promise((resolve, reject) => { + options.logger.info('Sending request', { reqId, options }); + + // using request.get, request.post, etc. 
- useful during unit test mocking + request[options.method.toLowerCase()](options, (err, res, body) => { + if (err) { + options.logger.error('Request error', { reqId, err }); + reject(new Error(`HTTP error: ${err}`)); + } else { + if (!rawResponseBody) { + try { + body = JSON.parse(body); + } catch (parseErr) { + // do nothing + } + } + if (includeResponseObject === true) { + body = [body, res]; + } + options.logger.info('Got response', { + body, + reqId, + statusCode: res.statusCode, + statusMessage: res.statusMessage + }); + + if (expectedResponseCode.indexOf(res.statusCode) !== -1 || continueOnErrorCode === true) { + resolve(body); + } else { + const resErr = new Error(`Bad status code: ${res.statusCode} ${res.statusMessage || ''} for ${options.uri}`); + resErr.statusCode = res.statusCode; + resErr.statusMessage = res.statusMessage; + resErr.response = res; + reject(resErr); + } + } + }); + }); +}; + +/** + * URL definition + * + * @typedef URL + * @type {Object} + * @property {string} [host] - host + * @property {string} [passphrase] - passphrase to sue for auth + * @property {'http' | 'https'} [protocol = 'https'] - HTTP protocol + * @property {integer} [port = 443] - port + * @property {string} [uri] - URI / path + * @property {string} [username] - username to use for auth + */ +/** + * @typedef RequestOptions + * @type {URL} + * @property {boolean} [allowSelfSignedCert = false] - do not require SSL certificates be valid + * @property {any} [body] - HTTP body, must be a Buffer, String or ReadStream or JSON-serializable object + * @property {boolean} [continueOnErrorCode = false] - continue on non-successful response code + * @property {Array|integer} [expectedResponseCode = 200] - expected response code + * @property {string} [fullURI] - full HTTP URI + * @property {boolean} [gzip] - accept compressed content from the server + * @property {Object} [headers] - HTTP headers + * @property {boolean} [includeResponseObject = false] - return [body, responseObject] 
+ * @property {boolean} [json = true] - sets HTTP body to JSON representation of value + * @property {logger.Logger} [logger] - logger + * @property {string} [method = 'GET'] - HTTP method + * @property {integer} [port = 443] - HTTP port + * @property {'http' | 'https'} [protocol = 'https'] - HTTP protocol + * @property {string | URL} [proxy] - proxy URI or proxy config + * @property {boolean} [rawResponseBody = false] - return response as Buffer object with binary data + * @property {integer} [timeout] - milliseconds to wait for a socket timeout (option from 'request' library) + */ diff --git a/test/functional/testRunner.js b/test/functional/testRunner.js index a8af9d60..007d6cd7 100644 --- a/test/functional/testRunner.js +++ b/test/functional/testRunner.js @@ -10,53 +10,78 @@ /* eslint-disable no-console */ -// initialize logger -const util = require('./shared/util'); // eslint-disable-line +/** + * @module test/functional/testRunner + */ + const constants = require('./shared/constants'); -const dutTests = require('./dutTests'); const consumerHostTests = require('./consumerSystemTests'); -const pullConsumerHostTests = require('./pullConsumerSystemTests'); +const dutTests = require('./dutTests'); +const harnessUtils = require('./shared/harness'); +const miscUtils = require('./shared/utils/misc'); + +const runConsumerTests = !miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.TESTS.SKIP_CONSUMER_TESTS, { + castTo: 'boolean', + defaultValue: false +}); +const runDutSetup = !miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.TESTS.SKIP_DUT_SETUP, { + castTo: 'boolean', + defaultValue: false +}); +const runDutTeardown = !miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.TESTS.SKIP_DUT_TEARDOWN, { + castTo: 'boolean', + defaultValue: false +}); +const runDutTests = !miscUtils.getEnvArg(constants.ENV_VARS.TEST_CONTROLS.TESTS.SKIP_DUT_TESTS, { + castTo: 'boolean', + defaultValue: false +}); -const skipDut = 
process.env[constants.ENV_VARS.TEST_CONTROLS.SKIP_DUT_TESTS]; -const skipConsumer = process.env[constants.ENV_VARS.TEST_CONTROLS.SKIP_CONSUMER_TESTS]; -const skipPullConsumer = process.env[constants.ENV_VARS.TEST_CONTROLS.SKIP_PULL_CONSUMER_TESTS]; -const truthyRegex = /^\s*(true|1)\s*$/i; +console.info('Directory for artifacts:', constants.ARTIFACTS_DIR); +miscUtils.createDir(constants.ARTIFACTS_DIR); -const runDut = !skipDut || !truthyRegex.test(skipDut); -const runConsumer = !skipConsumer || !truthyRegex.test(skipConsumer); -const runPullConsumer = !skipPullConsumer || !truthyRegex.test(skipPullConsumer); +console.info('Harness initialization'); +const harness = harnessUtils.initializeFromEnv(); +harnessUtils.setDefaultHarness(harness); describe('Global: Setup', () => { - dutTests.setup(); - if (runConsumer) { - consumerHostTests.setup(); + if (runDutSetup) { + describe('DUT(s) setup', dutTests.setup); + } else { + console.warn('WARN: skipping DUT setup'); } - if (runPullConsumer) { - pullConsumerHostTests.setup(); + + if (runConsumerTests) { + describe('CS(s) and tests setup', consumerHostTests.setup); + } else { + console.warn('WARN: skipping Consumer setup'); } }); describe('Global: Test', () => { - if (runDut) { - dutTests.test(); + if (runDutTests) { + describe('DUT(s) tests', dutTests.test); } else { console.warn('WARN: skipping DUT tests'); } - if (runConsumer) { - consumerHostTests.test(); + + if (runConsumerTests) { + describe('CS(s) tests', consumerHostTests.test); } else { console.warn('WARN: skipping Consumers tests'); } - if (runPullConsumer) { - pullConsumerHostTests.test(); - } else { - console.warn('WARN: skipping Pull Consumers tests'); - } }); describe('Global: Teardown', () => { - dutTests.teardown(); - if (runConsumer) { - consumerHostTests.teardown(); + if (runDutTeardown) { + describe('DUT(s) teardown', dutTests.teardown); + } else { + console.warn('WARN: skipping DUT teardown'); + } + + if (runConsumerTests) { + describe('CS(s) teardown', 
consumerHostTests.teardown); + } else { + console.warn('WARN: skipping Consumer teardown'); } }); diff --git a/test/unit/consumers/azureLogAnalyticsConsumerTests.js b/test/unit/consumers/azureLogAnalyticsConsumerTests.js index 3b7b115a..9b191b4b 100644 --- a/test/unit/consumers/azureLogAnalyticsConsumerTests.js +++ b/test/unit/consumers/azureLogAnalyticsConsumerTests.js @@ -38,6 +38,14 @@ describe('Azure_Log_Analytics', () => { allowSelfSignedCert: false }; + const propertyBasedConsumerConfig = { + workspaceId: 'myWorkspace', + passphrase: 'secret', + useManagedIdentity: false, + allowSelfSignedCert: false, + format: 'propertyBased' + }; + const getOpsInsightsReq = () => { const opInsightsReq = requests.find((r) => r.fullURI === 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01'); assert.notStrictEqual(opInsightsReq, undefined); @@ -185,6 +193,16 @@ describe('Azure_Log_Analytics', () => { .then(() => assert.sameDeepMembers(getAllOpsInsightsReqs(), azureLogData.systemData[0].expectedData)); }); + it('should process systemInfo data with propertyBased setting', () => { + const context = testUtil.buildConsumerContext({ + eventType: 'systemInfo', + config: propertyBasedConsumerConfig + }); + return azureAnalyticsIndex(context) + .then(() => assert.sameDeepMembers(getAllOpsInsightsReqs(), + azureLogData.propertyBasedSystemData[0].expectedData)); + }); + it('should process event data', () => { const context = testUtil.buildConsumerContext({ eventType: 'AVR', diff --git a/test/unit/consumers/azureUtilTests.js b/test/unit/consumers/azureUtilTests.js index 13d9231b..9c74c0e7 100644 --- a/test/unit/consumers/azureUtilTests.js +++ b/test/unit/consumers/azureUtilTests.js @@ -352,7 +352,16 @@ describe('Azure Util Tests', () => { name: 'topLevelKey1' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'someType')); + assert.isFalse(azureUtil.isConfigItems(testData, 'someType', false)); + }); + + it('poolMembers type is always config item', () => { 
+ const testData = { + topLevelKey1: { + name: 'topLevelKey1' + } + }; + assert.isTrue(azureUtil.isConfigItems(testData, 'poolMembers', true)); }); it('"proper path" record, but not matching name', () => { @@ -363,7 +372,7 @@ describe('Azure Util Tests', () => { name: 'bad name' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'someType')); + assert.isFalse(azureUtil.isConfigItems(testData, 'someType', false)); }); it('two "proper path" records, first name does not match', () => { @@ -375,7 +384,7 @@ describe('Azure Util Tests', () => { name: '/path2a/path2b' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'someType')); + assert.isFalse(azureUtil.isConfigItems(testData, 'someType', false)); }); it('two "proper path" records, second name does not match', () => { @@ -387,7 +396,7 @@ describe('Azure Util Tests', () => { name: 'bad name' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'someType')); + assert.isFalse(azureUtil.isConfigItems(testData, 'someType', false)); }); it('"proper path" records with matching names', () => { @@ -401,7 +410,7 @@ describe('Azure Util Tests', () => { name: '/path2a/path2b' } }; - assert.isTrue(azureUtil.isConfigItems(testData, 'someType')); + assert.isTrue(azureUtil.isConfigItems(testData, 'someType', false)); }); it('one "proper path" record, one not', () => { @@ -413,7 +422,7 @@ describe('Azure Util Tests', () => { name: '/badPath' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'someType')); + assert.isFalse(azureUtil.isConfigItems(testData, 'someType', false)); }); it('sslCerts type with matching name', () => { @@ -423,7 +432,7 @@ describe('Azure Util Tests', () => { name: 'topLevelKey1' } }; - assert.isTrue(azureUtil.isConfigItems(testData, 'sslCerts')); + assert.isTrue(azureUtil.isConfigItems(testData, 'sslCerts', false)); }); it('sslCerts type with name does not match', () => { @@ -437,7 +446,7 @@ describe('Azure Util Tests', () => { name: 'badKey' } }; - 
assert.isFalse(azureUtil.isConfigItems(testData, 'sslCerts')); + assert.isFalse(azureUtil.isConfigItems(testData, 'sslCerts', false)); }); it('sslCerts type with missing name', () => { @@ -450,7 +459,7 @@ describe('Azure Util Tests', () => { key1: 'value1' } }; - assert.isFalse(azureUtil.isConfigItems(testData, 'sslCerts')); + assert.isFalse(azureUtil.isConfigItems(testData, 'sslCerts', false)); }); }); @@ -508,6 +517,306 @@ describe('Azure Util Tests', () => { }); }); + describe('ClassPoolToMembersMapping', () => { + it('test isPoolType', () => { + const poolMemberMapping = new azureUtil.ClassPoolToMembersMapping(); + assert.strictEqual(poolMemberMapping.isPoolType('pools'), true); + assert.strictEqual(poolMemberMapping.isPoolType('mxPools'), true); + assert.strictEqual(poolMemberMapping.isPoolType('arbitrary'), false); + }); + + it('test isPoolMembersType', () => { + const poolMemberMapping = new azureUtil.ClassPoolToMembersMapping(); + assert.strictEqual(poolMemberMapping.isPoolMembersType('poolMembers'), true); + assert.strictEqual(poolMemberMapping.isPoolMembersType('mxPoolMembers'), true); + assert.strictEqual(poolMemberMapping.isPoolMembersType('arbitrary'), false); + }); + + it('test getPoolMembersType', () => { + const poolMemberMapping = new azureUtil.ClassPoolToMembersMapping(); + assert.strictEqual(poolMemberMapping.getPoolMembersType('pools'), 'poolMembers'); + assert.strictEqual(poolMemberMapping.getPoolMembersType('mxPools'), 'mxPoolMembers'); + assert.strictEqual(poolMemberMapping.getPoolMembersType('arbitrary'), null); + }); + + it('test buildPoolMemeberHolder', () => { + const expectedPoolMembersHolder = { + poolMembers: {}, + aPoolMembers: {}, + aaaaPoolMembers: {}, + cnamePoolMembers: {}, + mxPoolMembers: {}, + naptrPoolMembers: {}, + srvPoolMembers: {} + }; + const poolMemberMapping = new azureUtil.ClassPoolToMembersMapping(); + const allPoolMembers = {}; + poolMemberMapping.buildPoolMemeberHolder(allPoolMembers); + 
assert.deepStrictEqual(allPoolMembers, expectedPoolMembersHolder); + }); + }); + + describe('splitMembersFromPools', () => { + it('no pool members', () => { + const pool = { + key: 'value' + }; + // expectedPool is the same as pool + const expectedPool = { + key: 'value' + }; + const allPoolMembers = {}; + // expectedPoolMembers is the same as allPoolMembers + const expectedPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('members is a string (not observed)', () => { + const pool = { + key: 'value', + members: 'not an object' + }; + // expectedPool is the same as pool + const expectedPool = { + key: 'value', + members: 'not an object' + }; + const allPoolMembers = {}; + // expectedPoolMembers is the same as allPoolMembers + const expectedPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('members is an empty object', () => { + const pool = { + key: 'value', + members: {} + }; + // empty object deleted + const expectedPool = { + key: 'value' + }; + const allPoolMembers = {}; + // expectedPoolMembers is the same as allPoolMembers + const expectedPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('pool member is a string (not observed)', () => { + const pool = { + key: 'value', + members: { + key: 'arbitrary string', + topName: { + name: 'topName', + poolName: 'poolName' + } + } + }; + // expectedPool is the same as pool + const expectedPool = { + key: 'value', + members: { + key: 'arbitrary string' + } + }; + const allPoolMembers = {}; + // expectedPoolMembers is the same as allPoolMembers + const expectedPoolMembers = { + 
'topName-separator-poolName': { + name: 'topName', + poolName: 'poolName' + } + }; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('pool member does not have poolName (not observed)', () => { + const pool = { + key: 'value', + members: { + topNameA: { + name: 'topNameA', + poolName: 'poolName' + }, + topNameB: { + name: 'topNameB' + } + } + }; + // expectedPool is the same as pool + const expectedPool = { + key: 'value', + members: { + topNameB: { + name: 'topNameB' + } + } + }; + const allPoolMembers = {}; + // expectedPoolMembers is the same as allPoolMembers + const expectedPoolMembers = { + 'topNameA-separator-poolName': { + name: 'topNameA', + poolName: 'poolName' + } + }; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('pool member without name', () => { + const pool = { + key: 'value', + members: { + topName: { + poolName: 'poolName' + } + } + }; + // topName is added as expectedPoolMembers.poolMembers.topName-separator-poolName.name + const expectedPoolMembers = { + 'topName-separator-poolName': { + name: 'topName', + poolName: 'poolName' + } + }; + // no pool.members anymore + const expectedPool = { + key: 'value' + }; + const allPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('pool member name different from top name', () => { + const pool = { + key: 'value', + members: { + topName: { + name: 'memberName', + poolName: 'poolName' + } + } + }; + /* 'topName' overwrites 'memberName' in added as + expectedPoolMembers.poolMembers.topName-separator-poolName.name */ + // in reality, I've only seen that that if the member name is defined, it matches the 
top name + const expectedPoolMembers = { + 'topName-separator-poolName': { + name: 'topName', + poolName: 'poolName' + } + }; + // no pool.members anymore + const expectedPool = { + key: 'value' + }; + const allPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('two pool members', () => { + const pool = { + key: 'value', + members: { + topNameA: { + name: 'topNameA', + poolName: 'poolName' + }, + topNameB: { + name: 'topNameB', + poolName: 'poolName' + } + } + }; + const expectedPoolMembers = { + 'topNameA-separator-poolName': { + name: 'topNameA', + poolName: 'poolName' + }, + 'topNameB-separator-poolName': { + name: 'topNameB', + poolName: 'poolName' + } + }; + // no pool.members anymore + const expectedPool = { + key: 'value' + }; + const allPoolMembers = {}; + + azureUtil.splitMembersFromPools(pool, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(pool, expectedPool); + }); + + it('two pools, same member name', () => { + const poolA = { + key: 'valueA', + members: { + topName: { + name: 'topName', + poolName: 'poolNameA' + } + } + }; + const poolB = { + key: 'valueB', + members: { + topName: { + name: 'topName', + poolName: 'poolNameB' + } + } + }; + const expectedPoolMembers = { + 'topName-separator-poolNameA': { + name: 'topName', + poolName: 'poolNameA' + }, + 'topName-separator-poolNameB': { + name: 'topName', + poolName: 'poolNameB' + } + }; + // no pool.members anymore + const expectedPoolA = { + key: 'valueA' + }; + const expectedPoolB = { + key: 'valueB' + }; + const allPoolMembers = {}; + + azureUtil.splitMembersFromPools(poolA, allPoolMembers); + azureUtil.splitMembersFromPools(poolB, allPoolMembers); + assert.deepStrictEqual(allPoolMembers, expectedPoolMembers); + assert.deepStrictEqual(poolA, expectedPoolA); + assert.deepStrictEqual(poolB, 
expectedPoolB); + }); + }); + describe('scrubReservedKeys', () => { it('no key "tenant" - no change', () => { const inputData = { key1: 'value1', key2: 'value2' }; diff --git a/test/unit/consumers/data/azureLogAnalyticsConsumerTestsData.js b/test/unit/consumers/data/azureLogAnalyticsConsumerTestsData.js index 4c89b963..ae1f3057 100644 --- a/test/unit/consumers/data/azureLogAnalyticsConsumerTestsData.js +++ b/test/unit/consumers/data/azureLogAnalyticsConsumerTestsData.js @@ -2113,5 +2113,2102 @@ module.exports = { } ] } + ], + propertyBasedSystemData: [ + { + expectedData: [ + { + allowSelfSignedCert: false, + body: [ + { + afmState: 'quiescent', + apmState: 'Policies Consistent', + asmAttackSignatures: { + ff8080817a3a4908017a3a490958000e: { + filename: 'ASM-AttackSignatures_20190716_122131.im', + createDateTime: 1563279691000, + name: 'ff8080817a3a4908017a3a490958000e' + } + }, + + asmState: 'Policies Consistent', + baseMac: '00:0d:3a:30:34:51', + callBackUrl: 'https://10.0.1.100', + chassisId: '9c3abad5-513a-1c43-5bc2be62e957', + configReady: 'yes', + cpu: 0, + description: 'Telemetry BIG-IP', + diskLatency: { + 'dm-0': { + '%util': '0.00', + name: 'dm-0', + 'r/s': '0.00', + 'w/s': '0.00' + }, + 'dm-1': { + '%util': '0.01', + name: 'dm-1', + 'r/s': '0.01', + 'w/s': '11.01' + }, + 'dm-2': { + '%util': '0.00', + name: 'dm-2', + 'r/s': '0.14', + 'w/s': '2.56' + }, + 'dm-3': { + '%util': '0.01', + name: 'dm-3', + 'r/s': '0.01', + 'w/s': '4.28' + }, + 'dm-4': { + '%util': '0.00', + name: 'dm-4', + 'r/s': '0.00', + 'w/s': '0.00' + }, + 'dm-5': { + '%util': '0.00', + name: 'dm-5', + 'r/s': '0.04', + 'w/s': '1.52' + }, + 'dm-6': { + '%util': '0.00', + name: 'dm-6', + 'r/s': '0.13', + 'w/s': '0.00' + }, + 'dm-7': { + '%util': '0.00', + name: 'dm-7', + 'r/s': '0.00', + 'w/s': '0.05' + }, + 'dm-8': { + '%util': '0.01', + name: 'dm-8', + 'r/s': '0.11', + 'w/s': '4.72' + }, + sda: { + '%util': '0.09', + name: 'sda', + 'r/s': '1.46', + 'w/s': '8.25' + }, + sdb: { + '%util': 
'0.04', + name: 'sdb', + 'r/s': '1.00', + 'w/s': '0.00' + } + }, + diskStorage: { + '/': { + '1024-blocks': '436342', + Capacity: '55%', + Capacity_Float: 0.55, + name: '/' + }, + '/appdata': { + '1024-blocks': '51607740', + Capacity: '3%', + Capacity_Float: 0.03, + name: '/appdata' + }, + '/config': { + '1024-blocks': '3269592', + Capacity: '11%', + Capacity_Float: 0.11, + name: '/config' + }, + '/dev/shm': { + '1024-blocks': '7181064', + Capacity: '9%', + Capacity_Float: 0.09, + name: '/dev/shm' + }, + '/mnt/sshplugin_tempfs': { + '1024-blocks': '7181064', + Capacity: '0%', + Capacity_Float: 0, + name: '/mnt/sshplugin_tempfs' + }, + '/shared': { + '1024-blocks': '20642428', + Capacity: '3%', + Capacity_Float: 0.03, + name: '/shared' + }, + '/shared/rrd.1.2': { + '1024-blocks': '7181064', + Capacity: '1%', + Capacity_Float: 0.01, + name: '/shared/rrd.1.2' + }, + '/usr': { + '1024-blocks': '4136432', + Capacity: '83%', + Capacity_Float: 0.83, + name: '/usr' + }, + '/var': { + '1024-blocks': '3096336', + Capacity: '37%', + Capacity_Float: 0.37, + name: '/var' + }, + '/var/apm/mount/apmclients-7170.2018.627.21-3.0.iso': { + '1024-blocks': '298004', + Capacity: '100%', + Capacity_Float: 1, + name: '/var/apm/mount/apmclients-7170.2018.627.21-3.0.iso' + }, + '/var/log': { + '1024-blocks': '3023760', + Capacity: '8%', + Capacity_Float: 0.08, + name: '/var/log' + }, + '/var/loipc': { + '1024-blocks': '7181064', + Capacity: '0%', + Capacity_Float: 0, + name: '/var/loipc' + }, + '/var/prompt': { + '1024-blocks': '4096', + Capacity: '1%', + Capacity_Float: 0.01, + name: '/var/prompt' + }, + '/var/run': { + '1024-blocks': '7181064', + Capacity: '1%', + Capacity_Float: 0.01, + name: '/var/run' + }, + '/var/tmstat': { + '1024-blocks': '7181064', + Capacity: '1%', + Capacity_Float: 0.01, + name: '/var/tmstat' + } + }, + failoverColor: 'green', + failoverStatus: 'ACTIVE', + gtmConfigTime: '2019-06-07T18:11:53.000Z', + hostname: 'telemetry.bigip.com', + lastAfmDeploy: 
'2019-06-17T21:24:29.000Z', + lastAsmChange: '2019-06-19T20:15:28.000Z', + licenseReady: 'yes', + location: 'Seattle', + ltmConfigTime: '2019-06-19T21:13:40.000Z', + machineId: 'cd5e51b8-74ef-44c8-985c-7965512c2e87', + marketingName: 'BIG-IP Virtual Edition', + memory: 0, + networkInterfaces: { + 1.1: { + 'counters.bitsIn': 0, + 'counters.bitsOut': 0, + name: '1.1', + status: 'up' + }, + 1.2: { + 'counters.bitsIn': 0, + 'counters.bitsOut': 0, + name: '1.2', + status: 'up' + }, + mgmt: { + 'counters.bitsIn': 0, + 'counters.bitsOut': 0, + name: 'mgmt', + status: 'up' + } + }, + platformId: 'Z100', + provisionReady: 'yes', + provisioning: { + afm: { + level: 'nominal', + name: 'afm' + }, + am: { + level: 'none', + name: 'am' + }, + apm: { + level: 'nominal', + name: 'apm' + }, + asm: { + level: 'nominal', + name: 'asm' + }, + avr: { + level: 'nominal', + name: 'avr' + }, + dos: { + level: 'none', + name: 'dos' + }, + fps: { + level: 'none', + name: 'fps' + }, + gtm: { + level: 'none', + name: 'gtm' + }, + ilx: { + level: 'none', + name: 'ilx' + }, + lc: { + level: 'none', + name: 'lc' + }, + ltm: { + level: 'nominal', + name: 'ltm' + }, + pem: { + level: 'none', + name: 'pem' + }, + sslo: { + level: 'none', + name: 'sslo' + }, + swg: { + level: 'none', + name: 'swg' + }, + urldb: { + level: 'none', + name: 'urldb' + } + }, + throughputPerformance: { + clientBitsIn: { + average: 0, + current: 0, + max: 0, + name: 'clientBitsIn' + }, + clientBitsOut: { + average: 0, + current: 0, + max: 0, + name: 'clientBitsOut' + }, + clientIn: { + average: 0, + current: 0, + max: 0, + name: 'clientIn' + }, + clientOut: { + average: 0, + current: 0, + max: 0, + name: 'clientOut' + }, + compression: { + average: 0, + current: 0, + max: 0, + name: 'compression' + }, + inBits: { + average: 0, + current: 0, + max: 0, + name: 'inBits' + }, + inPackets: { + average: 0, + current: 0, + max: 0, + name: 'inPackets' + }, + managementBitsIn: { + average: 2969820, + current: 846485, + max: 
36591317, + name: 'managementBitsIn' + }, + managementBitsOut: { + average: 133, + current: 0, + max: 12478, + name: 'managementBitsOut' + }, + outBits: { + average: 0, + current: 0, + max: 0, + name: 'outBits' + }, + outPackets: { + average: 0, + current: 0, + max: 0, + name: 'outPackets' + }, + serverBitsIn: { + average: 0, + current: 0, + max: 0, + name: 'serverBitsIn' + }, + serverBitsOut: { + average: 0, + current: 0, + max: 0, + name: 'serverBitsOut' + }, + serverIn: { + average: 0, + current: 0, + max: 0, + name: 'serverIn' + }, + serverOut: { + average: 0, + current: 0, + max: 0, + name: 'serverOut' + }, + serviceBits: { + average: 0, + current: 0, + max: 0, + name: 'serviceBits' + }, + servicePackets: { + average: 0, + current: 0, + max: 0, + name: 'servicePackets' + }, + sslTps: { + average: 0, + current: 0, + max: 0, + name: 'sslTps' + } + }, + configSyncSucceeded: true, + syncColor: 'green', + syncMode: 'standalone', + syncStatus: 'Standalone', + syncSummary: ' ', + systemTimestamp: '2019-01-01T01:01:01Z', + swap: 0, + tmmCpu: 0, + tmmMemory: 0, + tmmTraffic: { + 'clientSideTraffic.bitsIn': 0, + 'clientSideTraffic.bitsOut': 0, + 'serverSideTraffic.bitsIn': 0, + 'serverSideTraffic.bitsOut': 0 + }, + version: '14.0.0.1', + versionBuild: '0.0.2' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:4hO6g0PLWeZAgWdyCllFS4NIks9QD/QGoV6R57TYotg=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_system', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + '/Common/app.app/app_vs': { + appService: '/Common/foofoo.app/foofoo', + application: 'foofoo.app', + availabilityState: 'offline', + 'clientside.bitsIn': 0, + 'clientside.bitsOut': 0, + 'clientside.curConns': 0, + 'clientside.evictedConns': 0, + 'clientside.maxConns': 0, + 'clientside.pktsIn': 0, + 'clientside.pktsOut': 0, 
+ 'clientside.slowKilled': 0, + 'clientside.totConns': 0, + destination: '10.5.6.7:80', + enabledState: 'enabled', + isAvailable: false, + isEnabled: true, + ipProtocol: 'tcp', + mask: '255.255.255.255', + name: '/Common/foofoo.app/foofoo_vs', + pool: '/Common/foofoo.app/foofoo_pool', + profiles: { + '/Common/app/http': { + application: 'app', + name: '/Common/app/http', + tenant: 'Common' + }, + '/Common/tcp': { + name: '/Common/tcp', + tenant: 'Common' + } + }, + 'status.statusReason': 'The virtual server is available', + tenant: 'Common' + }, + '/Example_Tenant/A1/serviceMain': { + application: 'A1', + availabilityState: 'offline', + 'clientside.bitsIn': 0, + 'clientside.bitsOut': 0, + 'clientside.curConns': 0, + 'clientside.evictedConns': 0, + 'clientside.maxConns': 0, + 'clientside.pktsIn': 0, + 'clientside.pktsOut': 0, + 'clientside.slowKilled': 0, + 'clientside.totConns': 0, + destination: '192.0.2.11:443', + enabledState: 'enabled', + isAvailable: false, + isEnabled: true, + ipProtocol: 'tcp', + mask: '255.255.255.0', + name: '/Example_Tenant/A1/serviceMain', + pool: '/Example_Tenant/A1/barbar_pool', + profiles: {}, + 'status.statusReason': 'The children pool member(s) either don\'t have service checking enabled, or service check results are not available yet', + tenant: 'Example_Tenant' + }, + '/Example_Tenant/A1/serviceMain-Redirect': { + application: 'A1', + availabilityState: 'unknown', + 'clientside.bitsIn': 0, + 'clientside.bitsOut': 0, + 'clientside.curConns': 0, + 'clientside.evictedConns': 0, + 'clientside.maxConns': 0, + 'clientside.pktsIn': 0, + 'clientside.pktsOut': 0, + 'clientside.slowKilled': 0, + 'clientside.totConns': 0, + destination: '192.0.2.11:80', + isAvailable: true, + isEnabled: true, + enabledState: 'enabled', + name: '/Example_Tenant/A1/serviceMain-Redirect', + profiles: { + '/Common/app/http': { + application: 'app', + name: '/Common/app/http', + tenant: 'Common' + }, + '/Common/customTcp': { + name: '/Common/customTcp', + tenant: 
'Common' + } + }, + 'status.statusReason': 'The children pool member(s) either don\'t have service checking enabled, or service check results are not available yet', + tenant: 'Example_Tenant' + } + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Ljrkg5HfQ5gH5GFUZz+lBrJngdOxnqD4cIqkY+CQvsQ=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_virtualServers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + activeMemberCnt: 0, + application: 'app.app', + availabilityState: 'available', + enabledState: 'enabled', + name: '/Common/app.app/app_pool', + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'The pool is available', + f5tenant: 'Common' + }, + { + activeMemberCnt: 0, + application: '', + availabilityState: 'available', + enabledState: 'enabled', + name: '/Common/telemetry-local', + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'The pool is available', + f5tenant: 'Common' + }, + { + activeMemberCnt: 0, + application: 'A1', + availabilityState: 'offline', + enabledState: 'enabled', + name: '/Example_Tenant/A1/hsl_pool', + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'The pool is available', + f5tenant: 'Example_Tenant' + }, + { + activeMemberCnt: 0, + application: 'A1', + availabilityState: 'offline', + enabledState: 'enabled', + name: '/Example_Tenant/A1/web_pool', + 
'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'The pool is available', + f5tenant: 'Example_Tenant' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:nMxWCUFvHmH1EZ2zAHc6l6rKvU0HBvAtxmbkqYIUBcs=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_pools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + actions: { + 'default:1': { + invoked: 0, + succeeded: 0 + } + }, + application: 'app.app', + invoked: 0, + name: '/Common/app.app/app_policy', + succeeded: 0, + f5tenant: 'Common' + }, + { + actions: { + 'default:0': { + invoked: 0, + succeeded: 0 + } + }, + application: '', + invoked: 0, + name: '/Common/telemetry', + succeeded: 0, + f5tenant: 'Common' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:XlNavOTwpub7izF1+BX486oqJP5Tihz7i6YVGGrSM1o=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_ltmPolicies', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + '2xxResp': 0, + '3xxResp': 0, + '4xxResp': 0, + '5xxResp': 0, + application: 'app.app', + cookiePersistInserts: 0, + getReqs: 0, + maxKeepaliveReq: 0, + name: '/Common/app.app/app_http', + numberReqs: 0, + postReqs: 0, + respGreaterThan2m: 0, + respLessThan2m: 0, + f5tenant: 'Common', + v10Reqs: 0, + v10Resp: 0, + v11Reqs: 0, + v11Resp: 0, + v9Reqs: 0, + v9Resp: 0 + }, + { + '2xxResp': 0, + '3xxResp': 0, + '4xxResp': 0, + '5xxResp': 0, + application: '', + cookiePersistInserts: 0, + getReqs: 0, + maxKeepaliveReq: 0, + name: '/Common/http', + numberReqs: 0, + postReqs: 
0, + respGreaterThan2m: 0, + respLessThan2m: 0, + f5tenant: 'Common', + v10Reqs: 0, + v10Resp: 0, + v11Reqs: 0, + v11Resp: 0, + v9Reqs: 0, + v9Resp: 0 + }, + { + '2xxResp': 0, + '3xxResp': 0, + '4xxResp': 0, + '5xxResp': 0, + application: 'A1', + cookiePersistInserts: 0, + getReqs: 0, + maxKeepaliveReq: 0, + name: '/Example_Tenant/A1/custom_http_profile', + numberReqs: 0, + postReqs: 0, + respGreaterThan2m: 0, + respLessThan2m: 0, + f5tenant: 'Example_Tenant', + v10Reqs: 0, + v10Resp: 0, + v11Reqs: 0, + v11Resp: 0, + v9Reqs: 0, + v9Resp: 0 + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:ZIjqZDzom4nenrvipHj3DBdSZW1NnDKZelEkHoleGsA=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_httpProfiles', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + activeHandshakeRejected: 0, + application: '', + 'cipherUses.adhKeyxchg': 0, + 'cipherUses.aesBulk': 0, + 'cipherUses.aesGcmBulk': 0, + 'cipherUses.camelliaBulk': 0, + 'cipherUses.chacha20Poly1305Bulk': 0, + 'cipherUses.desBulk': 0, + 'cipherUses.dhRsaKeyxchg': 0, + 'cipherUses.dheDssKeyxchg': 0, + 'cipherUses.ecdhEcdsaKeyxchg': 0, + 'cipherUses.ecdhRsaKeyxchg': 0, + 'cipherUses.ecdheEcdsaKeyxchg': 0, + 'cipherUses.ecdheRsaKeyxchg': 0, + 'cipherUses.edhRsaKeyxchg': 0, + 'cipherUses.ideaBulk': 0, + 'cipherUses.md5Digest': 0, + 'cipherUses.nullBulk': 0, + 'cipherUses.nullDigest': 0, + 'cipherUses.rc2Bulk': 0, + 'cipherUses.rc4Bulk': 0, + 'cipherUses.rsaKeyxchg': 0, + 'cipherUses.shaDigest': 0, + currentActiveHandshakes: 0, + currentCompatibleConnections: 0, + currentConnections: 0, + currentNativeConnections: 0, + decryptedBytesIn: 0, + decryptedBytesOut: 0, + encryptedBytesIn: 0, + encryptedBytesOut: 0, + fatalAlerts: 0, + handshakeFailures: 0, + name: '/Common/clientssl', + peercertInvalid: 0, + peercertNone: 0, + peercertValid: 0, + 
'protocolUses.dtlsv1': 0, + 'protocolUses.sslv2': 0, + 'protocolUses.sslv3': 0, + 'protocolUses.tlsv1': 0, + 'protocolUses.tlsv1_1': 0, + 'protocolUses.tlsv1_2': 0, + 'protocolUses.tlsv1_3': 0, + recordsIn: 0, + recordsOut: 0, + sniRejects: 0, + f5tenant: 'Common', + totCompatConns: 0, + totNativeConns: 0 + }, + { + activeHandshakeRejected: 0, + application: 'A1', + 'cipherUses.adhKeyxchg': 0, + 'cipherUses.aesBulk': 0, + 'cipherUses.aesGcmBulk': 0, + 'cipherUses.camelliaBulk': 0, + 'cipherUses.chacha20Poly1305Bulk': 0, + 'cipherUses.desBulk': 0, + 'cipherUses.dhRsaKeyxchg': 0, + 'cipherUses.dheDssKeyxchg': 0, + 'cipherUses.ecdhEcdsaKeyxchg': 0, + 'cipherUses.ecdhRsaKeyxchg': 0, + 'cipherUses.ecdheEcdsaKeyxchg': 0, + 'cipherUses.ecdheRsaKeyxchg': 0, + 'cipherUses.edhRsaKeyxchg': 0, + 'cipherUses.ideaBulk': 0, + 'cipherUses.md5Digest': 0, + 'cipherUses.nullBulk': 0, + 'cipherUses.nullDigest': 0, + 'cipherUses.rc2Bulk': 0, + 'cipherUses.rc4Bulk': 0, + 'cipherUses.rsaKeyxchg': 0, + 'cipherUses.shaDigest': 0, + currentActiveHandshakes: 0, + currentCompatibleConnections: 0, + currentConnections: 0, + currentNativeConnections: 0, + decryptedBytesIn: 0, + decryptedBytesOut: 0, + encryptedBytesIn: 0, + encryptedBytesOut: 0, + fatalAlerts: 0, + handshakeFailures: 0, + name: '/Example_Tenant/A1/webtls', + peercertInvalid: 0, + peercertNone: 0, + peercertValid: 0, + 'protocolUses.dtlsv1': 0, + 'protocolUses.sslv2': 0, + 'protocolUses.sslv3': 0, + 'protocolUses.tlsv1': 0, + 'protocolUses.tlsv1_1': 0, + 'protocolUses.tlsv1_2': 0, + 'protocolUses.tlsv1_3': 0, + recordsIn: 0, + recordsOut: 0, + sniRejects: 0, + f5tenant: 'Example_Tenant', + totCompatConns: 100, + totNativeConns: 100 + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:MbQoFG3klStE7rn0IOCGn/DOiZo21L85LCapDmF7MI4=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_clientSslProfiles', + 'x-ms-date': 
'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + activeHandshakeRejected: 0, + application: '', + 'cipherUses.adhKeyxchg': 0, + 'cipherUses.aesBulk': 0, + 'cipherUses.aesGcmBulk': 0, + 'cipherUses.camelliaBulk': 0, + 'cipherUses.chacha20Poly1305Bulk': 0, + 'cipherUses.desBulk': 0, + 'cipherUses.dhRsaKeyxchg': 0, + 'cipherUses.dheDssKeyxchg': 0, + 'cipherUses.ecdhEcdsaKeyxchg': 0, + 'cipherUses.ecdhRsaKeyxchg': 0, + 'cipherUses.ecdheEcdsaKeyxchg': 0, + 'cipherUses.ecdheRsaKeyxchg': 0, + 'cipherUses.edhRsaKeyxchg': 0, + 'cipherUses.ideaBulk': 0, + 'cipherUses.md5Digest': 0, + 'cipherUses.nullBulk': 0, + 'cipherUses.nullDigest': 0, + 'cipherUses.rc2Bulk': 0, + 'cipherUses.rc4Bulk': 0, + 'cipherUses.rsaKeyxchg': 0, + 'cipherUses.shaDigest': 0, + currentActiveHandshakes: 0, + currentCompatibleConnections: 0, + currentConnections: 0, + currentNativeConnections: 0, + decryptedBytesIn: 0, + decryptedBytesOut: 0, + encryptedBytesIn: 0, + encryptedBytesOut: 0, + fatalAlerts: 0, + handshakeFailures: 0, + name: '/Common/serverssl', + peercertInvalid: 0, + peercertNone: 0, + peercertValid: 0, + 'protocolUses.dtlsv1': 0, + 'protocolUses.sslv2': 0, + 'protocolUses.sslv3': 0, + 'protocolUses.tlsv1': 0, + 'protocolUses.tlsv1_1': 0, + 'protocolUses.tlsv1_2': 0, + 'protocolUses.tlsv1_3': 0, + recordsIn: 0, + recordsOut: 0, + f5tenant: 'Common', + totCompatConns: 0, + totNativeConns: 0 + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Y0yPeK/E0D0CKfF6TwuOmgM+lXxLjIVmcGo8THkLiY4=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_serverSslProfiles', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + expirationDate: 0, + expirationString: '2019-01-01T01:01:01Z', + issuer: 'CN=Starfield Services Root Certificate 
Authority,OU=http://certificates.starfieldtech.com/repository/,O=Starfield Technologies, Inc.,L=Scottsdale,ST=Arizona,C=US', + name: 'ca-bundle.crt', + subject: 'CN=Starfield Services Root Certificate Authority,OU=http://certificates.starfieldtech.com/repository/,O=Starfield Technologies, Inc.,L=Scottsdale,ST=Arizona,C=US' + }, + { + email: 'root@localhost.localdomain', + expirationDate: 0, + expirationString: '2019-01-01T01:01:01Z', + issuer: 'emailAddress=root@localhost.localdomain,CN=localhost.localdomain,OU=IT,O=MyCompany,L=Seattle,ST=WA,C=US', + name: 'default.crt', + subject: 'emailAddress=root@localhost.localdomain,CN=localhost.localdomain,OU=IT,O=MyCompany,L=Seattle,ST=WA,C=US' + }, + { + expirationDate: 0, + expirationString: '2019-01-01T01:01:01Z', + issuer: 'CN=Entrust Root Certification Authority - G2,OU=(c) 2009 Entrust, Inc. - for authorized use only,OU=See www.entrust.net/legal-terms,O=Entrust, Inc.,C=US', + name: 'f5-ca-bundle.crt', + subject: 'CN=Entrust Root Certification Authority - G2,OU=(c) 2009 Entrust, Inc. 
- for authorized use only,OU=See www.entrust.net/legal-terms,O=Entrust, Inc.,C=US' + }, + { + email: 'support@f5.com', + expirationDate: 0, + expirationString: '2019-01-01T01:01:01Z', + issuer: 'emailAddress=support@f5.com,CN=support.f5.com,OU=Product Development,O=F5 Networks,L=Seattle,ST=Washington,C=US', + name: 'f5-irule.crt', + subject: 'emailAddress=support@f5.com,CN=support.f5.com,OU=Product Development,O=F5 Networks,L=Seattle,ST=Washington,C=US' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:C6ocqhVZvMvXBSs8s2ViFzUg/OUvPfGPFxMO8HbFwq8=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_sslCerts', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + application: '', + hcInBroadcastPkts: 0, + hcInMulticastPkts: 0, + hcInOctets: 0, + hcInUcastPkts: 0, + hcOutBroadcastPkts: 0, + hcOutMulticastPkts: 0, + hcOutOctets: 0, + hcOutUcastPkts: 0, + inDiscards: 0, + inErrors: 0, + inUnknownProtos: 0, + name: '/Common/http-tunnel', + outDiscards: 0, + outErrors: 0, + f5tenant: 'Common' + }, + { + application: '', + hcInBroadcastPkts: 0, + hcInMulticastPkts: 0, + hcInOctets: 0, + hcInUcastPkts: 0, + hcOutBroadcastPkts: 0, + hcOutMulticastPkts: 0, + hcOutOctets: 0, + hcOutUcastPkts: 0, + inDiscards: 0, + inErrors: 0, + inUnknownProtos: 0, + name: '/Common/socks-tunnel', + outDiscards: 0, + outErrors: 0, + f5tenant: 'Common' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:9RL/uf4VRIIOiH/0bjDJgoLRrwEgS+FES0OZNfAVjAY=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_networkTunnels', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + commitIdTime: '2019-06-10T17:23:02.000Z', + lssTime: '-', + 
name: '/Common/device_trust_group', + f5tenant: 'Common', + timeSinceLastSync: '-', + type: 'sync-only' + }, + { + commitIdTime: '2019-05-31T01:11:48.000Z', + lssTime: '2019-05-31T01:11:48.000Z', + name: '/Common/example_device_group', + f5tenant: 'Common', + timeSinceLastSync: '1221553', + type: 'sync-failover' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:c/P55mrDOlkKftfjBdTqnLedZXK92rfynjs0r9mRuYI=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_deviceGroups', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + application: '', + events: { + HTTP_REQUEST: { + aborts: 0, + avgCycles: 19014, + failures: 0, + maxCycles: 19014, + minCycles: 8804, + priority: 500, + totalExecutions: 4 + }, + RULE_INIT: { + aborts: 0, + avgCycles: 19014, + failures: 0, + maxCycles: 19014, + minCycles: 8804, + priority: 500, + totalExecutions: 4 + } + }, + name: '/Common/_sys_APM_ExchangeSupport_OA_BasicAuth', + f5tenant: 'Common' + }, + { + application: '', + events: { + RULE_INIT: { + aborts: 0, + avgCycles: 28942, + failures: 0, + maxCycles: 28942, + minCycles: 20102, + priority: 500, + totalExecutions: 4 + } + }, + name: '/Common/_sys_APM_ExchangeSupport_OA_NtlmAuth', + f5tenant: 'Common' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:LcWIT+/Ue6Fif8sFPt011GJ1yKsGydoyO+WlpowB5RQ=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_iRules', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + aliases: [ + 'www.aone.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'noerror', + failureRcodeResponse: 'disabled', + failureRcodeTtl: 0, + fallback: 0, + 
lastResortPool: '', + loadBalancingDecisionLogVerbosity: [ + 'pool-traversal' + ], + minimalResponse: 'enabled', + name: '/Common/www.aone.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'round-robin', + pools: [ + '/Common/ts_a_pool', + '/Common/ts_cname_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + rules: [ + '/Common/test_irule' + ], + 'status.availabilityState': 'offline', + 'status.enabledState': 'enabled', + 'status.statusReason': 'No enabled pools available', + f5tenant: 'Common', + ttlPersistence: 3600, + wipType: 'A' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:zIo9RssfV/hIdtcNLBNpaoI58zGmw7LILt5aR9VViu8=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + aliases: [ + 'www.aaaaone.com', + 'www.aaaathree.com', + 'www.aaaatwo.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'formerr', + failureRcodeResponse: 'enabled', + failureRcodeTtl: 0, + fallback: 0, + lastResortPool: '/Common/ts_aaaa_pool', + loadBalancingDecisionLogVerbosity: [ + 'pool-traversal' + ], + minimalResponse: 'disabled', + name: '/Common/www.aaaaone.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'round-robin', + pools: [ + '/Common/ts_aaaa_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.availabilityState': 'offline', + 'status.enabledState': 'enabled', + 'status.statusReason': 'No enabled pools available', + f5tenant: 'Common', + ttlPersistence: 3600, + wipType: 'AAAA' + } + ], + fullURI: 
'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:3/rIJRb8E9CeVGRLfc4kyUbw8Us6Ygi7wjQ7VvISdxI=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aaaaWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + '/Common/www.cnameone.tstest.com': { + aliases: [ + 'www.cname.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'noerror', + failureRcodeResponse: 'disabled', + failureRcodeTtl: 0, + fallback: 0, + lastResortPool: '', + minimalResponse: 'enabled', + name: 'www.cnameone.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'round-robin', + pools: [ + '/Common/ts_cname_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.availabilityState': 'unknown', + 'status.enabledState': 'enabled', + 'status.statusReason': 'Checking', + tenant: 'Common', + ttlPersistence: 3600, + wipType: 'CNAME' + }, + '/Common/www.cnametwo.tstest.com': { + aliases: [ + 'www.cname2.com', + 'www.cnametwo.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'noerror', + failureRcodeResponse: 'disabled', + failureRcodeTtl: 0, + fallback: 0, + lastResortPool: '/Common/ts_cname_pool', + loadBalancingDecisionLogVerbosity: [ + 'pool-selection', + 'pool-traversal', + 'pool-member-selection', + 'pool-member-traversal' + ], + minimalResponse: 'enabled', + name: 'www.cnametwo.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'topology', + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + rules: [ + '/Common/test_irule' + ], + 'status.availabilityState': 'unknown', + 'status.enabledState': 'enabled', 
+ 'status.statusReason': 'Checking', + tenant: 'Common', + ttlPersistence: 3600, + wipType: 'CNAME' + } + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:SyBlvtHOAcr0DOu249kbsP0QdWM7d9oduaEgZOgAIZU=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_cnameWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + aliases: [ + 'www.mxone.com', + 'www.mxtwo.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'noerror', + failureRcodeResponse: 'disabled', + failureRcodeTtl: 0, + fallback: 0, + lastResortPool: '/Common/ts_mx_pool', + loadBalancingDecisionLogVerbosity: [ + 'pool-traversal', + 'pool-member-selection' + ], + minimalResponse: 'enabled', + name: '/Common/www.mxone.tstest.com', + persistCidrIpv4: 132, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'enabled', + poolLbMode: 'topology', + pools: [ + '/Common/ts_mx_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.availabilityState': 'offline', + 'status.enabledState': 'enabled', + 'status.statusReason': 'No enabled pools available', + f5tenant: 'Common', + ttlPersistence: 3600, + wipType: 'MX' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:2bpZKcgxzt1r9ImL/vDEkrCUj7ZYKyjhu2fZRkh29V4=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_mxWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + aliases: [ + 'www.naptrone.com', + 'www.naptrtwo.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'notimpl', + failureRcodeResponse: 'enabled', + failureRcodeTtl: 0, + 
fallback: 0, + lastResortPool: '/Common/ts_naptr_pool', + loadBalancingDecisionLogVerbosity: [ + 'pool-selection' + ], + minimalResponse: 'disabled', + name: '/Common/www.naptrone.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'global-availability', + pools: [ + '/Common/ts_cname_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.availabilityState': 'offline', + 'status.enabledState': 'enabled', + 'status.statusReason': 'No enabled pools available', + f5tenant: 'Common', + ttlPersistence: 3600, + wipType: 'NAPTR' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:KsTZ0SbN/FUFq59QvSbc3LV+DAiJi4oa751gf5KKxls=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_naptrWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + aliases: [ + 'www.srvone.com' + ], + alternate: 0, + cnameResolutions: 0, + dropped: 0, + enabled: true, + failureRcode: 'servfail', + failureRcodeResponse: 'enabled', + failureRcodeTtl: 0, + fallback: 0, + lastResortPool: '/Common/ts_cname_pool', + minimalResponse: 'disabled', + name: '/Common/www.srvone.tstest.com', + persistCidrIpv4: 32, + persistCidrIpv6: 128, + persisted: 0, + persistence: 'disabled', + poolLbMode: 'round-robin', + pools: [ + '/Common/ts_srv_pool' + ], + preferred: 0, + rcode: 0, + requests: 0, + resolutions: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.availabilityState': 'offline', + 'status.enabledState': 'enabled', + 'status.statusReason': 'No enabled pools available', + f5tenant: 'Common', + ttlPersistence: 3600, + wipType: 'SRV' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey 
myWorkspace:w5uZEhdapCNKyKI7Gmqc5b7wavnEQohHpPWlRw/G85s=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_srvWideIps', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + alternateMode: 'round-robin', + availabilityState: 'offline', + dropped: 0, + dynamicRatio: 'disabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackIp: '8.8.8.8', + fallbackMode: 'return-to-dns', + limitMaxBps: 0, + limitMaxBpsStatus: 'disabled', + limitMaxConnections: 0, + limitMaxConnectionsStatus: 'disabled', + limitMaxPps: 0, + limitMaxPpsStatus: 'disabled', + loadBalancingMode: 'ratio', + manualResume: 'disabled', + maxAnswersReturned: 1, + monitor: '/Common/gateway_icmp', + name: '/Common/ts_a_pool', + poolType: 'A', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'No enabled pool members available', + f5tenant: 'Common', + ttl: 30, + verifyMemberAvailability: 'disabled' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:XuY92KZOtXYHDIwwINEEP8IYRJc7/YDzdhttWoc9BcU=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aPools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + alternateMode: 'topology', + availabilityState: 'offline', + dropped: 0, + dynamicRatio: 'enabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackIp: 'any', + fallbackMode: 'return-to-dns', + limitMaxBps: 0, + limitMaxBpsStatus: 'disabled', + limitMaxConnections: 0, + limitMaxConnectionsStatus: 'enabled', + limitMaxPps: 0, + limitMaxPpsStatus: 'disabled', + loadBalancingMode: 
'round-robin', + manualResume: 'disabled', + maxAnswersReturned: 1, + monitor: 'min 1 of { /Common/http /Common/tcp }', + name: '/Common/ts_aaaa_pool', + poolType: 'AAAA', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'No enabled pool members available', + f5tenant: 'Common', + ttl: 30, + verifyMemberAvailability: 'enabled' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:szYFmlNFadGlABngNl6HH4RR2ieCPQOCckt23fbfdnI=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aaaaPools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + '/Common/ts_cname_pool': { + alternate: 0, + alternateMode: 'round-robin', + availabilityState: 'unknown', + dropped: 0, + dynamicRatio: 'disabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackMode: 'return-to-dns', + loadBalancingMode: 'round-robin', + manualResume: 'disabled', + members: { + 'www.cnameone.tstest.com': { + alternate: 0, + availabilityState: 'unknown', + enabledState: 'enabled', + fallback: 0, + poolName: '/Common/ts_cname_pool', + poolType: 'CNAME', + preferred: 0, + serverName: 'www.cnameone.tstest.com', + 'status.statusReason': 'Checking', + vsName: ' ' + } + }, + name: 'ts_cname_pool', + poolType: 'CNAME', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'Checking', + tenant: 'Common', + ttl: 30, + verifyMemberAvailability: 'enabled' + } + } + ], + fullURI: 
'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:3jFR/uLKBlik5kLw1lSgn3ItjdLiqd6hwm01NxdKRqk=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_cnamePools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + alternateMode: 'topology', + availabilityState: 'offline', + dropped: 0, + dynamicRatio: 'enabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackMode: 'return-to-dns', + loadBalancingMode: 'round-robin', + manualResume: 'enabled', + maxAnswersReturned: 12, + name: '/Common/ts_mx_pool', + poolType: 'MX', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'No enabled pool members available', + f5tenant: 'Common', + ttl: 30, + verifyMemberAvailability: 'enabled' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:CptcEmN+61YvhN0Hv4XMWF/+7OMP5Jx6iTOyxSBgyN4=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_mxPools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + alternateMode: 'virtual-server-score', + availabilityState: 'offline', + dropped: 0, + dynamicRatio: 'disabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackMode: 'ratio', + loadBalancingMode: 'static-persistence', + manualResume: 'enabled', + maxAnswersReturned: 1, + name: '/Common/ts_naptr_pool', + poolType: 'NAPTR', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + 
qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'No enabled pool members available', + f5tenant: 'Common', + ttl: 300, + verifyMemberAvailability: 'enabled' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Po5bMZ2n7Xgku1ZC2hJ/omlcC+SdZ0Yq0UwflYs7OWw=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_naptrPools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + alternateMode: 'packet-rate', + availabilityState: 'offline', + dropped: 0, + dynamicRatio: 'disabled', + enabled: true, + enabledState: 'enabled', + fallback: 0, + fallbackMode: 'quality-of-service', + loadBalancingMode: 'virtual-server-capacity', + manualResume: 'disabled', + maxAnswersReturned: 10, + name: '/Common/ts_srv_pool', + poolType: 'SRV', + preferred: 0, + qosHitRatio: 5, + qosHops: 0, + qosKilobytesSecond: 3, + qosLcs: 30, + qosPacketRate: 1, + qosRtt: 50, + qosTopology: 0, + qosVsCapacity: 0, + qosVsScore: 0, + returnFromDns: 0, + returnToDns: 0, + 'status.statusReason': 'No enabled pool members available', + f5tenant: 'Common', + ttl: 130, + verifyMemberAvailability: 'enabled' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:5WSda2dQy2lNzg5nUWLM43BQjDoDADbdZ2mJYln+oeI=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_srvPools', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + cycleEnd: '2019-01-01T01:01:01Z', + cycleStart: '2019-01-01T01:01:01Z', + pollingInterval: 0 + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Ao/vYptJv0TI5TnHcmSLiGdIwSbCTM0L5xW9Gh4OFbw=', 
+ 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_telemetryServiceInfo', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + value: 'systemInfo' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:HTlS6jtEi0oPBWJtKIlQOpx/IaXTjG4RxnFXr7aE0DY=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_telemetryEventCategory', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + addr: '10.0.3.5', + availabilityState: 'available', + enabledState: 'enabled', + monitorStatus: 'up', + name: '/Common/10.0.3.5:80', + poolName: '/Common/app.app/app_pool', + port: 0, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member is available' + }, + { + addr: '10.0.1.100', + availabilityState: 'available', + enabledState: 'enabled', + monitorStatus: 'down', + name: '/Common/10.0.1.100:6514', + poolName: '/Common/telemetry-local', + port: 0, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member has been marked down by a monitor' + }, + { + addr: '192.168.120.6', + availabilityState: 'offline', + enabledState: 'enabled', + monitorStatus: 'up', + name: '/Example_Tenant/192.168.120.6:514', + poolName: '/Example_Tenant/A1/hsl_pool', + port: 0, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member is available' + }, 
+ { + addr: '192.0.2.12', + availabilityState: 'offline', + enabledState: 'enabled', + monitorStatus: 'up', + name: '/Example_Tenant/192.0.2.12:80', + poolName: '/Example_Tenant/A1/web_pool', + port: 0, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member is available' + }, + { + addr: '192.0.2.13', + availabilityState: 'offline', + enabledState: 'enabled', + monitorStatus: 'up', + name: '/Example_Tenant/192.0.2.13:80', + poolName: '/Example_Tenant/A1/web_pool', + port: 0, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member is available' + }, + { + addr: '192.0.2.14', + availabilityState: 'unknown', + enabledState: 'enabled', + fqdn: 'bestwebsite.com', + monitorStatus: 'unchecked', + name: '/Example_Tenant/_auto_192.0.2.14:80', + poolName: '/Example_Tenant/A1/web_pool', + port: 80, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'Pool member does not have service checking enabled', + totRequests: 0 + }, + { + addr: '::', + availabilityState: 'available', + enabledState: 'enabled', + fqdn: 'bestwebsite.com', + monitorStatus: 'fqdn-up', + name: '/Example_Tenant/bestwebsite.com:80', + poolName: '/Example_Tenant/A1/web_pool', + port: 80, + 'serverside.bitsIn': 0, + 'serverside.bitsOut': 0, + 'serverside.curConns': 0, + 'serverside.maxConns': 0, + 'serverside.pktsIn': 0, + 'serverside.pktsOut': 0, + 'serverside.totConns': 0, + 'status.statusReason': 'The DNS server(s) are available', + totRequests: 0 + } + ], + fullURI: 
'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:oC2q1memHI4sQeTv8r4Xh0Vtwp5cz2EjmYJT1HECxcc=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_poolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + availabilityState: 'offline', + enabled: true, + enabledState: 'enabled', + fallback: 0, + limitMaxBps: 0, + limitMaxBpsStatus: 'disabled', + limitMaxConnections: 0, + limitMaxConnectionsStatus: 'disabled', + limitMaxPps: 0, + limitMaxPpsStatus: 'disabled', + memberOrder: 2, + monitor: 'default', + name: 'vs1:/Common/server1', + poolName: '/Common/ts_a_pool', + poolType: 'A', + preferred: 0, + ratio: 1, + serverName: '/Common/server1', + 'status.statusReason': ' Monitor /Common/gateway_icmp from 172.16.100.17 : no route', + vsName: 'vs1' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:XQjsc8aaTedOndVCezRcgYkmf0ZU8QwS8R0zjuF+nr8=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aPoolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + availabilityState: 'offline', + enabled: true, + enabledState: 'enabled', + fallback: 0, + limitMaxBps: 0, + limitMaxBpsStatus: 'disabled', + limitMaxConnections: 0, + limitMaxConnectionsStatus: 'disabled', + limitMaxPps: 0, + limitMaxPpsStatus: 'disabled', + memberOrder: 0, + monitor: 'default', + name: 'vs3:/Common/gslb_server1', + poolName: '/Common/ts_aaaa_pool', + poolType: 'AAAA', + preferred: 0, + ratio: 1, + serverName: '/Common/gslb_server1', + 'status.statusReason': ' Monitor /Common/tcp from 172.16.100.17 : state: connect failed', + vsName: 'vs3' + } + ], + fullURI: 
'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Smrp8PDn8iDs1SecPRU6OLv7+hM99G73RXTjrnnIcNk=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_aaaaPoolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.aaaaone.tstest.com', + poolName: '/Common/ts_mx_pool', + poolType: 'MX', + preferred: 0, + serverName: 'www.aaaaone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + }, + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.aone.tstest.com', + poolName: '/Common/ts_mx_pool', + poolType: 'MX', + preferred: 0, + serverName: 'www.aone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:EBesHlQhpWMq3g+1TAh54a7EdT9ZlAHJpdkvxnhz9uY=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_mxPoolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.aone.tstest.com', + poolName: '/Common/ts_naptr_pool', + poolType: 'NAPTR', + preferred: 0, + serverName: 'www.aone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + }, + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.srvone.tstest.com', + poolName: '/Common/ts_naptr_pool', + poolType: 'NAPTR', + preferred: 0, + serverName: 
'www.srvone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:31eauq04DzrryK91A3Hj92rPJURzuu2MSHLB8Ok/8vY=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_naptrPoolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + }, + { + allowSelfSignedCert: false, + body: [ + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.aaaaone.tstest.com', + poolName: '/Common/ts_srv_pool', + poolType: 'SRV', + preferred: 0, + serverName: 'www.aaaaone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + }, + { + alternate: 0, + availabilityState: 'offline', + enabledState: 'enabled', + fallback: 0, + name: 'www.aone.tstest.com', + poolName: '/Common/ts_srv_pool', + poolType: 'SRV', + preferred: 0, + serverName: 'www.aone.tstest.com', + 'status.statusReason': 'No Wide IPs available: No enabled pools available', + vsName: ' ' + } + ], + fullURI: 'https://myWorkspace.ods.opinsights.azure.com/api/logs?api-version=2016-04-01', + headers: { + Authorization: 'SharedKey myWorkspace:Z5z3V74HAR5YbEwDUmLj/VLi5kNTDZlfp3d5vWhxybQ=', + 'Content-Type': 'application/json', + 'Log-Type': 'F5Telemetry_srvPoolMembers', + 'x-ms-date': 'Thu, 01 Jan 1970 00:00:00 GMT' + }, + method: 'POST' + } + ] + } ] }; diff --git a/test/unit/consumers/statsdConsumerTests.js b/test/unit/consumers/statsdConsumerTests.js index 54ef33fd..f1794697 100644 --- a/test/unit/consumers/statsdConsumerTests.js +++ b/test/unit/consumers/statsdConsumerTests.js @@ -237,7 +237,7 @@ describe('Statsd', () => { return statsDIndex(context) .then(() => { assert.strictEqual(context.logger.exception.callCount, 1); - assert.deepEqual( + assert.deepStrictEqual( 
context.logger.exception.firstCall.args, ['Unable to forward to statsd client', 'Connection failure to server'] ); @@ -275,7 +275,7 @@ describe('Statsd', () => { const expectedTags = getExpectedData(true).find( (d) => d.metricName === expectedMetricName ).metricTags; - assert.deepEqual(tracedTags, expectedTags); + assert.deepStrictEqual(tracedTags, expectedTags); }); }); }); diff --git a/test/unit/data/configUtilTests/normalizeDeclarationEndpointsTestsData.js b/test/unit/data/configUtilTests/normalizeDeclarationEndpointsTestsData.js index db59a874..12b9269e 100644 --- a/test/unit/data/configUtilTests/normalizeDeclarationEndpointsTestsData.js +++ b/test/unit/data/configUtilTests/normalizeDeclarationEndpointsTestsData.js @@ -169,6 +169,11 @@ module.exports = { endpoint2: { path: 'sysStats', protocol: 'snmp' + }, + endpoint3: { + path: 'sysStats', + protocol: 'snmp', + numericalEnums: true } } } @@ -249,7 +254,15 @@ module.exports = { enable: true, name: 'endpoint2', path: 'sysStats', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false + }, + endpoint3: { + enable: true, + name: 'endpoint3', + path: 'sysStats', + protocol: 'snmp', + numericalEnums: true } } }, @@ -303,7 +316,8 @@ module.exports = { enable: true, name: 'endpoint2', path: 'sysStats', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } } @@ -338,7 +352,8 @@ module.exports = { }, enabledSnmpEndpoint1: { path: 'enabledSnmpEndpoint1', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: true } } }, @@ -691,7 +706,8 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint1', path: 'enabledSnmpEndpoint1', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: true } } }, @@ -763,25 +779,29 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint1', path: 'enabledSnmpEndpoint1', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: true }, enabledSnmpEndpoint2: { enable: true, name: 'enabledSnmpEndpoint2', path: 'enabledSnmpEndpoint2', - protocol: 'snmp' + 
protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint_4: { enable: true, name: 'enabledSnmpEndpoint_4', path: 'enabledSnmpEndpoint.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint_5: { enable: true, name: 'enabledSnmpEndpoint_5', path: 'enabledSnmpEndpoint.5', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } }, @@ -841,13 +861,15 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint1', path: 'enabledSnmpEndpoint1', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: true }, enabledSnmpEndpoint2: { enable: true, name: 'enabledSnmpEndpoint2', path: 'enabledSnmpEndpoint2', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } }, @@ -901,7 +923,8 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint', path: 'enabledSnmpEndpoint1.3', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } }, @@ -973,25 +996,29 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint', path: 'enabledSnmpEndpoint1.3', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint2: { enable: true, name: 'enabledSnmpEndpoint2', path: 'enabledSnmpEndpoint.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint_4: { enable: true, name: 'enabledSnmpEndpoint_4', path: 'enabledSnmpEndpoint.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint_5: { enable: true, name: 'enabledSnmpEndpoint_5', path: 'enabledSnmpEndpoint.5', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } }, @@ -1051,13 +1078,15 @@ module.exports = { enable: true, name: 'enabledSnmpEndpoint', path: 'enabledSnmpEndpoint1.3', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false }, enabledSnmpEndpoint2: { enable: true, name: 'enabledSnmpEndpoint2', path: 'enabledSnmpEndpoint.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } } } diff --git 
a/test/unit/data/configUtilTests/normalizeDeclarationSystemPollerTestsData.js b/test/unit/data/configUtilTests/normalizeDeclarationSystemPollerTestsData.js index 5f536681..9529d2d8 100644 --- a/test/unit/data/configUtilTests/normalizeDeclarationSystemPollerTestsData.js +++ b/test/unit/data/configUtilTests/normalizeDeclarationSystemPollerTestsData.js @@ -581,6 +581,11 @@ module.exports = { snmpEndpoint: { path: '1.2.3.4', protocol: 'snmp' + }, + snmpEndpointWithOptions: { + numericalEnums: true, + path: '1.2.3.4', + protocol: 'snmp' } } } @@ -654,10 +659,18 @@ module.exports = { protocol: 'http' }, snmpEndpoint: { + numericalEnums: false, path: '1.2.3.4', enable: true, name: 'snmpEndpoint', protocol: 'snmp' + }, + snmpEndpointWithOptions: { + numericalEnums: true, + path: '1.2.3.4', + protocol: 'snmp', + enable: true, + name: 'snmpEndpointWithOptions' } }, name: 'My_Poller_1', @@ -730,6 +743,7 @@ module.exports = { protocol: 'http' }, snmpEndpoint: { + numericalEnums: false, path: '1.2.3.4', enable: true, name: 'snmpEndpoint', @@ -913,7 +927,8 @@ module.exports = { path: '1.2.3.4', enable: true, name: 'snmpEndpoint', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } }, name: 'My_Poller_1', @@ -985,7 +1000,8 @@ module.exports = { path: '1.2.3.4', enable: true, name: 'snmpEndpoint', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } }, name: 'My_Poller_1', diff --git a/test/unit/data/configUtilTests/normalizeDeclarationSystemTestsData.js b/test/unit/data/configUtilTests/normalizeDeclarationSystemTestsData.js index f01c5151..1b410233 100644 --- a/test/unit/data/configUtilTests/normalizeDeclarationSystemTestsData.js +++ b/test/unit/data/configUtilTests/normalizeDeclarationSystemTestsData.js @@ -1462,7 +1462,8 @@ module.exports = { path: '1.2.3.4', enable: true, name: 'snmpEndpoint', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false } }, enable: true, diff --git a/test/unit/data/customEndpointsTestsData.js 
b/test/unit/data/customEndpointsTestsData.js index 899a24bd..7bcf91c0 100644 --- a/test/unit/data/customEndpointsTestsData.js +++ b/test/unit/data/customEndpointsTestsData.js @@ -595,7 +595,7 @@ module.exports = { { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', response: { kind: 'tm:util:bash:runstate', commandResult: '' @@ -621,7 +621,7 @@ module.exports = { { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', response: { kind: 'tm:util:bash:runstate' } @@ -648,10 +648,10 @@ module.exports = { { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysGlobalStat.sysStatMemoryTotal"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysGlobalStat.sysStatMemoryTotal"', response: { kind: 'tm:util:bash:runstate', - commandResult: 'sysStatMemoryTotal.0 3179282432\n' + commandResult: 'sysStatMemoryTotal.0 = 3179282432\n' } } ] @@ -683,10 +683,10 @@ module.exports = { { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', + request: (body) => body.utilCmdArgs && 
body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', response: { kind: 'tm:util:bash:runstate', - commandResult: 'sysTmmPagesStatSlot.0.0 0\nsysTmmPagesStatSlot.0.1 0\nsysTmmPagesStatTmm.0.0 0\nsysTmmPagesStatTmm.0.1 1\nsysTmmPagesStatPagesUsed.0.0 45869\nsysTmmPagesStatPagesUsed.0.1 50462\nsysTmmPagesStatPagesAvail.0.0 387584\nsysTmmPagesStatPagesAvail.0.1 388608\n' + commandResult: 'sysTmmPagesStatSlot.0.0 = 0\nsysTmmPagesStatSlot.0.1 = 0\nsysTmmPagesStatTmm.0.0 = 0\nsysTmmPagesStatTmm.0.1 = 1\nsysTmmPagesStatPagesUsed.0.0 = 45869\nsysTmmPagesStatPagesUsed.0.1 = 50462\nsysTmmPagesStatPagesAvail.0.0 = 387584\nsysTmmPagesStatPagesAvail.0.1 = 388608\n' } } ] @@ -708,6 +708,15 @@ module.exports = { tmmPages: { path: 'sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry', protocol: 'snmp' + }, + enumToNumeric: { + path: 'ifAdmin.isUp', + protocol: 'snmp', + numericalEnums: true + }, + enumAsIs: { + path: 'ifAdmin.isUp', + protocol: 'snmp' } }, expectedData: { @@ -726,34 +735,58 @@ module.exports = { }, usedMemory: { 'sysStatMemoryUsed.0': 290295264 + }, + enumToNumeric: { + 'ifAdmin.isUp': 1 + }, + enumAsIs: { + 'ifAdmin.isUp': 'true' } }, endpoints: [ { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysGlobalStat.sysStatMemoryUsed"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysGlobalStat.sysStatMemoryUsed"', + response: { + kind: 'tm:util:bash:runstate', + commandResult: 'sysStatMemoryUsed.0 = 290295264\n' + } + }, + { + endpoint: /\/mgmt\/tm\/util\/bash/, + method: 'POST', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysGlobalStat.sysStatMemoryTotal"', + response: { + kind: 'tm:util:bash:runstate', + commandResult: 'sysStatMemoryTotal.0 = 
3179282432\n' + } + }, + { + endpoint: /\/mgmt\/tm\/util\/bash/, + method: 'POST', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', response: { kind: 'tm:util:bash:runstate', - commandResult: 'sysStatMemoryUsed.0 290295264\n' + commandResult: 'sysTmmPagesStatSlot.0.0 = 0\nsysTmmPagesStatSlot.0.1 = 0\nsysTmmPagesStatTmm.0.0 = 0\nsysTmmPagesStatTmm.0.1 = 1\nsysTmmPagesStatPagesUsed.0.0 = 45869\nsysTmmPagesStatPagesUsed.0.1 = 50462\nsysTmmPagesStatPagesAvail.0.0 = 387584\nsysTmmPagesStatPagesAvail.0.1 = 388608\n' } }, { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysGlobalStat.sysStatMemoryTotal"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O eQUs -c public localhost ifAdmin.isUp"', response: { kind: 'tm:util:bash:runstate', - commandResult: 'sysStatMemoryTotal.0 3179282432\n' + commandResult: 'ifAdmin.isUp = 1\n' } }, { endpoint: /\/mgmt\/tm\/util\/bash/, method: 'POST', - request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O qUs -c public localhost sysTmmPagesStat.sysTmmPagesStatTable.sysTmmPagesStatEntry"', + request: (body) => body.utilCmdArgs && body.utilCmdArgs === '-c "snmpwalk -L n -O QUs -c public localhost ifAdmin.isUp"', response: { kind: 'tm:util:bash:runstate', - commandResult: 'sysTmmPagesStatSlot.0.0 0\nsysTmmPagesStatSlot.0.1 0\nsysTmmPagesStatTmm.0.0 0\nsysTmmPagesStatTmm.0.1 1\nsysTmmPagesStatPagesUsed.0.0 45869\nsysTmmPagesStatPagesUsed.0.1 50462\nsysTmmPagesStatPagesAvail.0.0 387584\nsysTmmPagesStatPagesAvail.0.1 388608\n' + commandResult: 'ifAdmin.isUp = true\n' } } ] diff --git a/test/unit/declarationTests.js b/test/unit/declarationTests.js index 23ff19e3..581d3c01 100644 --- a/test/unit/declarationTests.js +++ b/test/unit/declarationTests.js @@ 
-74,7 +74,33 @@ describe('Declarations', () => { files.forEach((file) => { it(`should validate example: ${file}`, () => { const data = JSON.parse(fs.readFileSync(`${baseDir}/${file}`)); - return assert.isFulfilled(declValidator(data)); + return declValidator(data); + }); + }); + }); + + describe('Validate Example Declaration from test/functional/shared/data/declarations', () => { + beforeEach(() => { + // fs access modification to skip folder check + const originFsAccess = fs.access; + sinon.stub(fs, 'access').callsFake(function () { + const path = arguments[0]; + const callback = arguments[arguments.length - 1]; + if (path === 'example_download_folder') { + callback(); + } else { + originFsAccess.apply(null, arguments); + } + }); + coreStub.utilMisc.getRuntimeInfo.nodeVersion = '8.12.0'; + }); + // first let's validate all example declarations + const baseDir = `${__dirname}/../functional/shared/data/declarations`; + const files = fs.readdirSync(baseDir); + files.forEach((file) => { + it(`should validate example: ${file}`, () => { + const data = JSON.parse(fs.readFileSync(`${baseDir}/${file}`)); + return declValidator(data); }); }); }); @@ -1794,6 +1820,12 @@ describe('Declarations', () => { name: 'snmpEndpoint', path: '1.2.3.4', protocol: 'snmp' + }, + { + name: 'snmpEndpointWithOptions', + path: '1.2.3.4', + protocol: 'snmp', + numericalEnums: true } ] } @@ -1854,7 +1886,15 @@ describe('Declarations', () => { enable: true, name: 'snmpEndpoint', path: '1.2.3.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: false + }, + { + enable: true, + name: 'snmpEndpointWithOptions', + path: '1.2.3.4', + protocol: 'snmp', + numericalEnums: true } ] ); @@ -2014,7 +2054,8 @@ describe('Declarations', () => { testC: { name: 'c', path: '1.2.3.4', - protocol: 'snmp' + protocol: 'snmp', + numericalEnums: true } } }, @@ -2061,6 +2102,12 @@ describe('Declarations', () => { name: 'hostCpu', path: '1.2.3.4.5', protocol: 'snmp' + }, + { + name: 'hostCpu_2', + path: 
'1.2.3.4.5', + protocol: 'snmp', + numericalEnums: true } ] } @@ -2095,7 +2142,15 @@ describe('Declarations', () => { name: 'hostCpu', path: '1.2.3.4.5', protocol: 'snmp', - enable: true + enable: true, + numericalEnums: false + }, + { + name: 'hostCpu_2', + path: '1.2.3.4.5', + protocol: 'snmp', + enable: true, + numericalEnums: true } ] ); @@ -3592,6 +3647,7 @@ describe('Declarations', () => { .then((validConfig) => { const endpoints = validConfig.My_Endpoints; assert.strictEqual(endpoints.items.test.protocol, 'http'); + assert.isUndefined(endpoints.items.test.numericalEnums); }); }); @@ -3612,6 +3668,29 @@ describe('Declarations', () => { .then((validConfig) => { const endpoints = validConfig.My_Endpoints; assert.strictEqual(endpoints.items.test.protocol, 'snmp'); + assert.isFalse(endpoints.items.test.numericalEnums); + }); + }); + + it('should allow setting SNMP options', () => { + const data = { + class: 'Telemetry', + My_Endpoints: { + class: 'Telemetry_Endpoints', + items: { + test: { + path: '1.2.3.4', + protocol: 'snmp', + numericalEnums: true + } + } + } + }; + return declValidator(data) + .then((validConfig) => { + const endpoints = validConfig.My_Endpoints; + assert.strictEqual(endpoints.items.test.protocol, 'snmp'); + assert.isTrue(endpoints.items.test.numericalEnums); }); }); @@ -3673,6 +3752,23 @@ describe('Declarations', () => { return assert.isRejected(declValidator(data), /something.*should NOT have additional properties/); }); + it('should not allow SNMP options when protocol is not "snmp"', () => { + const data = { + class: 'Telemetry', + My_Endpoints: { + class: 'Telemetry_Endpoints', + items: { + test: { + name: 'test', + path: '/test/path', + numericalEnums: true + } + } + } + }; + return assert.isRejected(declValidator(data), /should NOT be valid/); + }); + it('should allow full declaration', () => { const data = { class: 'Telemetry', @@ -3705,6 +3801,13 @@ describe('Declarations', () => { path: '1.2.3.4.5', protocol: 'snmp', enable: false + 
}, + f: { + name: 'testF', + path: '1.2.3.4.5', + protocol: 'snmp', + numericalEnums: true, + enable: false } } } @@ -3736,12 +3839,21 @@ describe('Declarations', () => { name: 'testD', path: '1.2.3.4.5', enable: true, + numericalEnums: false, protocol: 'snmp' }, e: { name: 'testE', path: '1.2.3.4.5', protocol: 'snmp', + numericalEnums: false, + enable: false + }, + f: { + name: 'testF', + path: '1.2.3.4.5', + protocol: 'snmp', + numericalEnums: true, enable: false } }); diff --git a/test/unit/utils/metricsTests.js b/test/unit/utils/metricsTests.js new file mode 100644 index 00000000..65175ffe --- /dev/null +++ b/test/unit/utils/metricsTests.js @@ -0,0 +1,115 @@ +/* + * Copyright 2022. F5 Networks, Inc. See End User License Agreement ("EULA") for + * license terms. Notwithstanding anything to the contrary in the EULA, Licensee + * may copy and modify this software product for its internal business purposes. + * Further, Licensee may upload, publish and distribute the modified version of + * the software product on devcentral.f5.com. 
+ */ + +'use strict'; + +/* eslint-disable import/order */ +const moduleCache = require('../shared/restoreCache')(); + +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); +const sinon = require('sinon'); + +const metricsUtil = require('../../../src/lib/utils/metrics'); + +chai.use(chaiAsPromised); +const assert = chai.assert; + +moduleCache.remember(); + +describe('Metrics Util', () => { + before(() => { + moduleCache.restore(); + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('.parseNumber()', () => { + it('should parse valid numbers', () => { + assert.deepStrictEqual(metricsUtil.parseNumber('0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumber('-0'), -0); + assert.deepStrictEqual(metricsUtil.parseNumber('+0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('-10'), -10); + assert.deepStrictEqual(metricsUtil.parseNumber('+10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumber('-10.10'), -10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('+10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('+1.2E-38'), 1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('-1.2E-38'), -1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('1.2E-38'), 1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix 0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix 10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix -0'), -0); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix +10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix 10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix +1.2E-38'), 1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix10'), 10); + 
assert.deepStrictEqual(metricsUtil.parseNumber('prefix-0'), -0); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix+10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix-10.10'), -10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix+10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix-1.2E-38'), -1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix1.2E-38'), 1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumber('0 suffix'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('10 suffix'), 10); + assert.deepStrictEqual(metricsUtil.parseNumber('-0 suffix'), -0); + assert.deepStrictEqual(metricsUtil.parseNumber('+0 suffix'), 0); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix 100.10 suffix'), 100.1); + assert.deepStrictEqual(metricsUtil.parseNumber('prefix100.10suffix'), 100.1); + }); + + it('should return false when unable to parse number', () => { + assert.isFalse(metricsUtil.parseNumber('false')); + }); + }); + + describe('.parseNumberStrict()', () => { + it('should parse valid numbers', () => { + assert.deepStrictEqual(metricsUtil.parseNumberStrict('0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('-0'), -0); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('+0'), 0); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('-10'), -10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('+10'), 10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('-10.10'), -10.10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('+10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('10.10'), 10.10); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('+1.2E-38'), 1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('-1.2E-38'), -1.2E-38); + assert.deepStrictEqual(metricsUtil.parseNumberStrict('1.2E-38'), 1.2E-38); + }); + + it('should return false when unable to 
parse number', () => { + assert.isFalse(metricsUtil.parseNumberStrict('false')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix 0')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix 10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix -0')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix +10.10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix 10.10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix +1.2E-38')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix0')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix-0')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix+10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix-10.10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix+10.10')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix-1.2E-38')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix1.2E-38')); + assert.isFalse(metricsUtil.parseNumberStrict('0 suffix')); + assert.isFalse(metricsUtil.parseNumberStrict('10 suffix')); + assert.isFalse(metricsUtil.parseNumberStrict('-0 suffix')); + assert.isFalse(metricsUtil.parseNumberStrict('+0 suffix')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix 100.10 suffix')); + assert.isFalse(metricsUtil.parseNumberStrict('prefix100.10suffix')); + }); + }); +}); diff --git a/test/unit/utils/normalizeTests.js b/test/unit/utils/normalizeTests.js index 02f699bc..6b3bcca8 100644 --- a/test/unit/utils/normalizeTests.js +++ b/test/unit/utils/normalizeTests.js @@ -484,7 +484,7 @@ describe('Normalize Util', () => { it('should restructure single snmp mib (one stat)', () => { const args = { data: { - commandResult: 'sysStatMemoryTotal.0 3179282432\n' + commandResult: 'sysStatMemoryTotal.0 = 3179282432\n' } }; const actual = normalizeUtil.restructureSNMPEndpoint(args); @@ -493,14 +493,53 @@ describe('Normalize Util', () => { }); }); + it('should restructure single snmp mib (one 
stat, non-numeric)', () => { + const args = { + data: { + commandResult: 'ifAdmin.isUp = false\n' + } + }; + const actual = normalizeUtil.restructureSNMPEndpoint(args); + assert.deepStrictEqual(actual, { + 'ifAdmin.isUp': 'false' + }); + }); + + it('should ignore invalid response (one stat)', () => { + const args = { + data: { + commandResult: 'ifAdmin.isUp false\n' + } + }; + const actual = normalizeUtil.restructureSNMPEndpoint(args); + assert.deepStrictEqual(actual, {}); + }); + + it('should ignore invalid response (multiple stats)', () => { + const args = { + data: { + commandResult: 'sysTmmPagesStatSlot.0.0 = 0\nsysTmmPagesStatSlot.0.1 = 0\nifAdmin.isUp false\nsysTmmPagesStatTmm.0.0 = 0\nsysTmmPagesStatTmm.0.1 = 1\ninvalidVal = 10.0.0' + } + }; + const actual = normalizeUtil.restructureSNMPEndpoint(args); + assert.deepStrictEqual(actual, { + invalidVal: '10.0.0', + 'sysTmmPagesStatSlot.0.0': 0, + 'sysTmmPagesStatSlot.0.1': 0, + 'sysTmmPagesStatTmm.0.0': 0, + 'sysTmmPagesStatTmm.0.1': 1 + }); + }); + it('should restructure single snmp mib (multiple stats)', () => { const args = { data: { - commandResult: 'sysTmmPagesStatSlot.0.0 0\nsysTmmPagesStatSlot.0.1 0\nsysTmmPagesStatTmm.0.0 0\nsysTmmPagesStatTmm.0.1 1\nsysTmmPagesStatPagesUsed.0.0 45869\nsysTmmPagesStatPagesUsed.0.1 50462\nsysTmmPagesStatPagesAvail.0.0 387584\nsysTmmPagesStatPagesAvail.0.1 388608\n' + commandResult: 'sysTmmPagesStatSlot.0.0 = 0\nsysTmmPagesStatSlot.0.1 = 0\nifAdmin.isUp = false\nsysTmmPagesStatTmm.0.0 = 0\nsysTmmPagesStatTmm.0.1 = 1\nsysTmmPagesStatPagesUsed.0.0 = 45869\nsysTmmPagesStatPagesUsed.0.1 = 50462\nsysTmmPagesStatPagesAvail.0.0 = 387584\nsysTmmPagesStatPagesAvail.0.1 = 388608\n' } }; const actual = normalizeUtil.restructureSNMPEndpoint(args); assert.deepStrictEqual(actual, { + 'ifAdmin.isUp': 'false', 'sysTmmPagesStatSlot.0.0': 0, 'sysTmmPagesStatSlot.0.1': 0, 'sysTmmPagesStatTmm.0.0': 0, diff --git a/test/winstonLogger.js b/test/winstonLogger.js index d43cb357..97a8afe0 100644 
--- a/test/winstonLogger.js +++ b/test/winstonLogger.js @@ -101,23 +101,38 @@ const formatter = (options) => `[${options.timestamp()}][${options.level.toUpper + `${maskSecrets(options.meta && Object.keys(options.meta).length ? ('\n' + JSON.stringify(options.meta, null, 4)) : '')}`; // json === false to allow custom formatting -const fileTransport = new (winston.transports.File)({ - name: 'fileOutput', - filename: LOG_FILE, - level: 'debug', - json: false, - options: { - flags: 'w' - }, - timestamp, - formatter -}); - const fileLogger = new (winston.Logger)({ + levels: winston.config.syslog.levels, transports: [ - fileTransport + new (winston.transports.File)({ + name: 'fileOutput', + filename: LOG_FILE, + level: 'debug', + json: false, + options: { + flags: 'w' + }, + timestamp, + formatter + }) ] }); +let mainLogger = fileLogger; + +if (LOG_DST === 'console') { + mainLogger = new (winston.Logger)({ + levels: winston.config.syslog.levels, + transports: [ + new (winston.transports.Console)({ + name: 'consoleOutput', + level: 'debug', + json: false, + timestamp, + formatter + }) + ] + }); +} function hookStream(stream, callback) { stream.write = (function (write) { @@ -151,21 +166,15 @@ if (LOG_DST === 'file') { } module.exports = { - logger: fileLogger, + logger: mainLogger, tsLogger: (function () { - let logger = fileLogger; - if (LOG_DST === 'console') { - logger = console; - } else if (LOG_DST !== 'file') { - logger = null; - } return { - logger, + logger: mainLogger, levels: { finest: 'debug', info: 'info', severe: 'error', - warning: 'warn' + warning: 'warning' } }; }()) diff --git a/versions.json b/versions.json index 52aa2b7e..4e496992 100644 --- a/versions.json +++ b/versions.json @@ -1,7 +1,7 @@ { "versionMetaTimestamp": 1540928503, "latestVersion": { - "name": "1.29 (non-LTS)", + "name": "1.30 (non-LTS)", "url": "/products/extensions/f5-telemetry-streaming/latest/" }, "otherVersions": [